From d5243200573cac3250d6689c25d22a5ea3b49047 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:24:59 -0400 Subject: [PATCH 01/49] bot: Update dependencies (bulk dependabot PRs) 03-11-2024 (#2399) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2398 bot: Bump google.golang.org/grpc from 1.62.0 to 1.62.1 ⚠️ The following PRs were resolved manually due to merge conflicts: #2397 bot: Bump google.golang.org/protobuf from 1.32.0 to 1.33.0 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 3d114d62ca..a35f170cb3 100644 --- a/go.mod +++ b/go.mod @@ -46,8 +46,8 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.24.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc - google.golang.org/grpc v1.62.0 - google.golang.org/protobuf v1.32.0 + google.golang.org/grpc v1.62.1 + google.golang.org/protobuf v1.33.0 ) require ( diff --git a/go.sum b/go.sum index deb2f24823..9d119e9967 100644 --- a/go.sum +++ b/go.sum @@ -770,8 +770,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.62.0 h1:HQKZ/fa1bXkX1oFOvSjmZEUL8wLSaZTjCcLAlmZRtdk= -google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= +google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -783,8 +783,8 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= From 
c67bc5688fc950e7e4fc93a0a0173d41a4278963 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 14 Mar 2024 15:07:06 -0400 Subject: [PATCH 02/49] feat: Add PatchCollection (#2402) ## Relevant issue(s) Resolves #2389 ## Description Adds the PatchCollection command. Mutating anything but the collection name is currently disabled, we can expand this as we see fit, but for now I'd prefer to keep the initial PR small. This change means that the Collection Name is no longer always going to be the same as the Schema Name. --- cli/cli.go | 1 + cli/collection_patch.go | 69 +++++ cli/schema_patch.go | 2 +- client/db.go | 11 + client/mocks/db.go | 43 +++ db/collection.go | 280 ++++++++++++++++++ db/description/collection.go | 36 +++ db/errors.go | 260 ++++++++++------ db/txn_db.go | 25 ++ http/client.go | 19 ++ http/handler_store.go | 30 ++ tests/clients/cli/wrapper.go | 10 + tests/clients/http/wrapper.go | 7 + .../collection_description/simple_test.go | 42 +++ .../updates/add/collections_test.go | 107 +++++++ .../updates/add/sources_test.go | 39 +++ .../updates/copy/name_test.go | 98 ++++++ .../updates/move/name_test.go | 66 +++++ .../updates/remove/collections_test.go | 41 +++ .../updates/remove/name_test.go | 49 +++ .../updates/replace/fields_test.go | 39 +++ .../updates/replace/id_test.go | 146 +++++++++ .../updates/replace/indexes_test.go | 39 +++ .../updates/replace/name_test.go | 210 +++++++++++++ .../updates/replace/root_id_test.go | 39 +++ .../updates/replace/schema_version_id_test.go | 39 +++ .../updates/replace/sources_test.go | 39 +++ .../updates/test/name_test.go | 60 ++++ .../field/kind/foreign_object_array_test.go | 2 +- .../add/field/kind/foreign_object_test.go | 2 +- .../field/with_index_test.go} | 6 +- .../schema/updates/add/simple_test.go | 2 +- .../schema/updates/copy/simple_test.go | 2 +- .../schema/updates/replace/simple_test.go | 2 +- tests/integration/test_case.go | 12 + tests/integration/utils2.go | 19 ++ 36 files changed, 1798 insertions(+), 95 deletions(-) create mode 100644 cli/collection_patch.go create mode 100644 tests/integration/collection_description/simple_test.go create mode 100644 tests/integration/collection_description/updates/add/collections_test.go create mode 100644 tests/integration/collection_description/updates/add/sources_test.go create mode 100644 tests/integration/collection_description/updates/copy/name_test.go create mode 100644 tests/integration/collection_description/updates/move/name_test.go create mode 100644 tests/integration/collection_description/updates/remove/collections_test.go create mode 100644 tests/integration/collection_description/updates/remove/name_test.go create mode 100644 tests/integration/collection_description/updates/replace/fields_test.go create mode 100644 tests/integration/collection_description/updates/replace/id_test.go create mode 100644 tests/integration/collection_description/updates/replace/indexes_test.go create mode 100644 tests/integration/collection_description/updates/replace/name_test.go create mode 100644 tests/integration/collection_description/updates/replace/root_id_test.go create mode 100644 tests/integration/collection_description/updates/replace/schema_version_id_test.go create mode 100644 tests/integration/collection_description/updates/replace/sources_test.go create mode 100644 tests/integration/collection_description/updates/test/name_test.go rename tests/integration/schema/updates/{index/simple_test.go => add/field/with_index_test.go} (88%) diff --git a/cli/cli.go b/cli/cli.go index 
4cdb8c443b..c40c6528d8 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -95,6 +95,7 @@ func NewDefraCommand() *cobra.Command { MakeCollectionUpdateCommand(), MakeCollectionCreateCommand(), MakeCollectionDescribeCommand(), + MakeCollectionPatchCommand(), ) client := MakeClientCommand() diff --git a/cli/collection_patch.go b/cli/collection_patch.go new file mode 100644 index 0000000000..49d5a91305 --- /dev/null +++ b/cli/collection_patch.go @@ -0,0 +1,69 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "fmt" + "io" + "os" + + "github.com/spf13/cobra" +) + +func MakeCollectionPatchCommand() *cobra.Command { + var patchFile string + var cmd = &cobra.Command{ + Use: "patch [patch]", + Short: "Patch existing collection descriptions", + Long: `Patch existing collection descriptions. + +Uses JSON Patch to modify collection descriptions. + +Example: patch from an argument string: + defradb client collection patch '[{ "op": "add", "path": "...", "value": {...} }]' + +Example: patch from file: + defradb client collection patch -p patch.json + +Example: patch from stdin: + cat patch.json | defradb client collection patch - + +To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`, + Args: cobra.RangeArgs(0, 1), + RunE: func(cmd *cobra.Command, args []string) error { + store := mustGetContextStore(cmd) + + var patch string + switch { + case patchFile != "": + data, err := os.ReadFile(patchFile) + if err != nil { + return err + } + patch = string(data) + case len(args) > 0 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + patch = string(data) + case len(args) == 1: + patch = args[0] + default: + return fmt.Errorf("patch cannot be empty") + } + + return store.PatchCollection(cmd.Context(), patch) + }, + } + cmd.Flags().StringVarP(&patchFile, "patch-file", "p", "", "File to load a patch from") + return cmd +} diff --git a/cli/schema_patch.go b/cli/schema_patch.go index 23f425396d..cf9224d204 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -37,7 +37,7 @@ Example: patch from an argument string: defradb client schema patch '[{ "op": "add", "path": "...", "value": {...} }]' '{"lenses": [...' Example: patch from file: - defradb client schema patch -f patch.json + defradb client schema patch -p patch.json Example: patch from stdin: cat patch.json | defradb client schema patch - diff --git a/client/db.go b/client/db.go index 7b0cc8060f..660c03998f 100644 --- a/client/db.go +++ b/client/db.go @@ -120,6 +120,17 @@ type Store interface { // A lens configuration may also be provided, it will be added to all collections using the schema. PatchSchema(context.Context, string, immutable.Option[model.Lens], bool) error + // PatchCollection takes the given JSON patch string and applies it to the set of CollectionDescriptions + // present in the database. + // + // It will also update the GQL types used by the query system. It will error and not apply any of the + // requested, valid updates should the net result of the patch result in an invalid state. 
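For example, the following two-operation patch (a sketch drawn from this PR's own collection patch tests) is accepted
+ // even though its first operation alone would briefly leave two active collections sharing one name:
+ //
+ //	[
+ //		{ "op": "copy", "from": "/1/Name", "path": "/2/Name" },
+ //		{ "op": "remove", "path": "/1/Name" }
+ //	]
+ //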
The + // individual operations defined in the patch do not need to result in a valid state, only the net result + // of the full patch. + // + // Currently only the collection name can be modified. + PatchCollection(context.Context, string) error + // SetActiveSchemaVersion activates all collection versions with the given schema version, and deactivates all // those without it (if they share the same schema root). // diff --git a/client/mocks/db.go b/client/mocks/db.go index aeb54ea4cd..c6f6711a59 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -857,6 +857,49 @@ func (_c *DB_NewTxn_Call) RunAndReturn(run func(context.Context, bool) (datastor return _c } +// PatchCollection provides a mock function with given fields: _a0, _a1 +func (_m *DB) PatchCollection(_a0 context.Context, _a1 string) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DB_PatchCollection_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PatchCollection' +type DB_PatchCollection_Call struct { + *mock.Call +} + +// PatchCollection is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 string +func (_e *DB_Expecter) PatchCollection(_a0 interface{}, _a1 interface{}) *DB_PatchCollection_Call { + return &DB_PatchCollection_Call{Call: _e.mock.On("PatchCollection", _a0, _a1)} +} + +func (_c *DB_PatchCollection_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_PatchCollection_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *DB_PatchCollection_Call) Return(_a0 error) *DB_PatchCollection_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DB_PatchCollection_Call) RunAndReturn(run func(context.Context, string) error) *DB_PatchCollection_Call { + _c.Call.Return(run) + return _c +} + // PatchSchema provides a mock function with given fields: _a0, _a1, _a2, _a3 func (_m *DB) PatchSchema(_a0 context.Context, _a1 string, _a2 immutable.Option[model.Lens], _a3 bool) error { ret := _m.Called(_a0, _a1, _a2, _a3) diff --git a/db/collection.go b/db/collection.go index c9d311f01a..23ef06d9c4 100644 --- a/db/collection.go +++ b/db/collection.go @@ -13,10 +13,13 @@ package db import ( "bytes" "context" + "encoding/json" "fmt" + "reflect" "strconv" "strings" + jsonpatch "github.com/evanphx/json-patch/v5" "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" @@ -526,6 +529,283 @@ func validateUpdateSchemaFields( return hasChanged, nil } +func (db *db) patchCollection( + ctx context.Context, + txn datastore.Txn, + patchString string, +) error { + patch, err := jsonpatch.DecodePatch([]byte(patchString)) + if err != nil { + return err + } + + cols, err := description.GetCollections(ctx, txn) + if err != nil { + return err + } + + existingColsByID := map[uint32]client.CollectionDescription{} + for _, col := range cols { + existingColsByID[col.ID] = col + } + + existingDescriptionJson, err := json.Marshal(existingColsByID) + if err != nil { + return err + } + + newDescriptionJson, err := patch.Apply(existingDescriptionJson) + if err != nil { + return err + } + + var newColsByID map[uint32]client.CollectionDescription + decoder := json.NewDecoder(strings.NewReader(string(newDescriptionJson))) + decoder.DisallowUnknownFields() + err = decoder.Decode(&newColsByID) + if err != nil { + return err + 
} + + err = db.validateCollectionChanges(existingColsByID, newColsByID) + if err != nil { + return err + } + + for _, col := range newColsByID { + _, err := description.SaveCollection(ctx, txn, col) + if err != nil { + return err + } + } + + return db.loadSchema(ctx, txn) +} + +var patchCollectionValidators = []func( + map[uint32]client.CollectionDescription, + map[uint32]client.CollectionDescription, +) error{ + validateCollectionNameUnique, + validateSingleVersionActive, + validateSourcesNotModified, + validateIndexesNotModified, + validateFieldsNotModified, + validateIDNotZero, + validateIDUnique, + validateIDExists, + validateRootIDNotMutated, + validateSchemaVersionIDNotMutated, + validateCollectionNotRemoved, +} + +func (db *db) validateCollectionChanges( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, validators := range patchCollectionValidators { + err := validators(oldColsByID, newColsByID) + if err != nil { + return err + } + } + + return nil +} + +func validateCollectionNameUnique( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + names := map[string]struct{}{} + for _, col := range newColsByID { + if !col.Name.HasValue() { + continue + } + + if _, ok := names[col.Name.Value()]; ok { + return NewErrCollectionAlreadyExists(col.Name.Value()) + } + names[col.Name.Value()] = struct{}{} + } + + return nil +} + +func validateSingleVersionActive( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + rootsWithActiveCol := map[uint32]struct{}{} + for _, col := range newColsByID { + if !col.Name.HasValue() { + continue + } + + if _, ok := rootsWithActiveCol[col.RootID]; ok { + return NewErrMultipleActiveCollectionVersions(col.Name.Value(), col.RootID) + } + rootsWithActiveCol[col.RootID] = struct{}{} + } + + return nil +} + +func validateSourcesNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary, for example soon + // users will be able to change the migration + if !reflect.DeepEqual(oldCol.Sources, newCol.Sources) { + return NewErrCollectionSourcesCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateIndexesNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary + if !reflect.DeepEqual(oldCol.Indexes, newCol.Indexes) { + return NewErrCollectionIndexesCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateFieldsNotModified( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + // DeepEqual is temporary, as this validation is temporary + if !reflect.DeepEqual(oldCol.Fields, newCol.Fields) { + return NewErrCollectionFieldsCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateIDNotZero( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID 
map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + if newCol.ID == 0 { + return ErrCollectionIDCannotBeZero + } + } + + return nil +} + +func validateIDUnique( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + colIds := map[uint32]struct{}{} + for _, newCol := range newColsByID { + if _, ok := colIds[newCol.ID]; ok { + return NewErrCollectionIDAlreadyExists(newCol.ID) + } + colIds[newCol.ID] = struct{}{} + } + + return nil +} + +func validateIDExists( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + if _, ok := oldColsByID[newCol.ID]; !ok { + return NewErrAddCollectionIDWithPatch(newCol.ID) + } + } + + return nil +} + +func validateRootIDNotMutated( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + if newCol.RootID != oldCol.RootID { + return NewErrCollectionRootIDCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateSchemaVersionIDNotMutated( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { + for _, newCol := range newColsByID { + oldCol, ok := oldColsByID[newCol.ID] + if !ok { + continue + } + + if newCol.SchemaVersionID != oldCol.SchemaVersionID { + return NewErrCollectionSchemaVersionIDCannotBeMutated(newCol.ID) + } + } + + return nil +} + +func validateCollectionNotRemoved( + oldColsByID map[uint32]client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, +) error { +oldLoop: + for _, oldCol := range oldColsByID { + for _, newCol := range newColsByID { + // It is not enough to just match by the map index, in case the index does not pair + // up with the ID (this can happen if a user moves the collection within the map) + if newCol.ID == oldCol.ID { + continue oldLoop + } + } + + return NewErrCollectionsCannotBeDeleted(oldCol.ID) + } + + return nil +} + // SetActiveSchemaVersion activates all collection versions with the given schema version, and deactivates all // those without it (if they share the same schema root). 
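In these
+ // descriptions a collection version is treated as active when its Name is set (the copy, move, and remove name tests
+ // added in this PR rely on this), so activating one version and deactivating another effectively moves the name between them.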
// + diff --git a/db/description/collection.go b/db/description/collection.go index 8ffd473053..a6e9cd8b57 100644 --- a/db/description/collection.go +++ b/db/description/collection.go @@ -13,8 +13,10 @@ package description import ( "context" "encoding/json" + "errors" "sort" + ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/defradb/client" @@ -29,6 +31,11 @@ func SaveCollection( txn datastore.Txn, desc client.CollectionDescription, ) (client.CollectionDescription, error) { + existing, err := GetCollectionByID(ctx, txn, desc.ID) + if err != nil && !errors.Is(err, ds.ErrNotFound) { + return client.CollectionDescription{}, err + } + buf, err := json.Marshal(desc) if err != nil { return client.CollectionDescription{}, err @@ -40,6 +47,35 @@ func SaveCollection( return client.CollectionDescription{}, err } + if existing.Name.HasValue() && existing.Name != desc.Name { + nameKey := core.NewCollectionNameKey(existing.Name.Value()) + idBuf, err := txn.Systemstore().Get(ctx, nameKey.ToDS()) + nameIndexExists := true + if err != nil { + if errors.Is(err, ds.ErrNotFound) { + nameIndexExists = false + } else { + return client.CollectionDescription{}, err + } + } + if nameIndexExists { + var keyID uint32 + err = json.Unmarshal(idBuf, &keyID) + if err != nil { + return client.CollectionDescription{}, err + } + + if keyID == desc.ID { + // The name index may have already been overwritten, pointing at another collection; + // we should only remove the existing index if it still points at this collection + err := txn.Systemstore().Delete(ctx, nameKey.ToDS()) + if err != nil { + return client.CollectionDescription{}, err + } + } + } + } + if desc.Name.HasValue() { idBuf, err := json.Marshal(desc.ID) if err != nil { diff --git a/db/errors.go b/db/errors.go index 34dd0d53b5..a41e396a8b 100644 --- a/db/errors.go +++ b/db/errors.go @@ -16,95 +16,112 @@ import ( ) const ( - errFailedToGetHeads string = "failed to get document heads" - errFailedToCreateCollectionQuery string = "failed to create collection prefix query" - errFailedToGetCollection string = "failed to get collection" - errFailedToGetAllCollections string = "failed to get all collections" - errDocVerification string = "the document verification failed" - errAddingP2PCollection string = "cannot add collection ID" - errRemovingP2PCollection string = "cannot remove collection ID" - errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" - errCollectionIDDoesntMatch string = "CollectionID does not match existing" - errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" - errCannotSetVersionID string = "setting the VersionID is not supported. 
It is updated automatically" - errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" - errRelationalFieldInvalidRelationType string = "invalid RelationType" - errRelationalFieldMissingIDField string = "missing id field for relation object field" - errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideNotDefined string = "primary side of relation not defined" - errPrimarySideOnMany string = "cannot set the many side of a relation as primary" - errBothSidesPrimary string = "both sides of a relation cannot be primary" - errRelatedFieldKindMismatch string = "invalid Kind of the related field" - errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" - errRelationalFieldIDInvalidType string = "relational id field of invalid kind" - errDuplicateField string = "duplicate field" - errCannotMutateField string = "mutating an existing field is not supported" - errCannotMoveField string = "moving fields is not currently supported" - errCannotDeleteField string = "deleting an existing field is not supported" - errFieldKindNotFound string = "no type found for given name" - errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" - errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given ID already exists" - errDocumentDeleted string = "a document with the given ID has been deleted" - errIndexMissingFields string = "index missing fields" - errNonZeroIndexIDProvided string = "non-zero index ID provided" - errIndexFieldMissingName string = "index field missing name" - errIndexFieldMissingDirection string = "index field missing direction" - errIndexWithNameAlreadyExists string = "index with name already exists" - errInvalidStoredIndex string = "invalid stored index" - errInvalidStoredIndexKey string = "invalid stored index key" - errNonExistingFieldForIndex string = "creating an index on a non-existing property" - errCollectionDoesntExisting string = "collection with given name doesn't exist" - errFailedToStoreIndexedField string = "failed to store indexed field" - errFailedToReadStoredIndexDesc string = "failed to read stored index description" - errCanNotDeleteIndexedField string = "can not delete indexed field" - errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" - errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" - errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" - errIndexWithNameDoesNotExists string = "index with name doesn't exists" - errCorruptedIndex string = "corrupted index. 
Please delete and recreate the index" - errInvalidFieldValue string = "invalid field value" - errUnsupportedIndexFieldType string = "unsupported index field type" - errIndexDescriptionHasNoFields string = "index description has no fields" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" - errCreateFile string = "failed to create file" - errRemoveFile string = "failed to remove file" - errOpenFile string = "failed to open file" - errCloseFile string = "failed to close file" - errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" - errFailedToReadByte string = "failed to read byte" - errFailedToWriteString string = "failed to write string" - errJSONDecode string = "failed to decode JSON" - errDocFromMap string = "failed to create a new doc from map" - errDocCreate string = "failed to save a new doc to collection" - errDocUpdate string = "failed to update doc to collection" - errExpectedJSONObject string = "expected JSON object" - errExpectedJSONArray string = "expected JSON array" - errOneOneAlreadyLinked string = "target document is already linked to another document" - errIndexDoesNotMatchName string = "the index used does not match the given name" - errCanNotIndexNonUniqueFields string = "can not index a doc's field(s) that violates unique index" - errInvalidViewQuery string = "the query provided is not valid as a View" + errFailedToGetHeads string = "failed to get document heads" + errFailedToCreateCollectionQuery string = "failed to create collection prefix query" + errFailedToGetCollection string = "failed to get collection" + errFailedToGetAllCollections string = "failed to get all collections" + errDocVerification string = "the document verification failed" + errAddingP2PCollection string = "cannot add collection ID" + errRemovingP2PCollection string = "cannot remove collection ID" + errAddCollectionWithPatch string = "adding collections via patch is not supported" + errCollectionIDDoesntMatch string = "CollectionID does not match existing" + errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" + errCannotModifySchemaName string = "modifying the schema name is not supported" + errCannotSetVersionID string = "setting the VersionID is not supported" + errRelationalFieldMissingSchema string = "a schema name must be provided when adding a new relation field" + errRelationalFieldInvalidRelationType string = "invalid RelationType" + errRelationalFieldMissingIDField string = "missing id field for relation object field" + errRelationalFieldMissingRelationName string = "missing relation name" + errPrimarySideNotDefined string = "primary side of relation not defined" + errPrimarySideOnMany string = "cannot set the many side of a relation as primary" + errBothSidesPrimary string = "both sides of a relation cannot be primary" + errRelatedFieldKindMismatch string = "invalid Kind of the related field" + errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" + errRelationalFieldIDInvalidType string = "relational id field of invalid kind" + errDuplicateField string = "duplicate field" + errCannotMutateField string = "mutating an existing field is not supported" + errCannotMoveField string = "moving fields is not currently supported" + errCannotDeleteField string = "deleting an existing field is not supported" + errFieldKindNotFound string = "no type found for given name" + errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" + 
errSchemaNotFound string = "no schema found for given name" + errDocumentAlreadyExists string = "a document with the given ID already exists" + errDocumentDeleted string = "a document with the given ID has been deleted" + errIndexMissingFields string = "index missing fields" + errNonZeroIndexIDProvided string = "non-zero index ID provided" + errIndexFieldMissingName string = "index field missing name" + errIndexFieldMissingDirection string = "index field missing direction" + errIndexWithNameAlreadyExists string = "index with name already exists" + errInvalidStoredIndex string = "invalid stored index" + errInvalidStoredIndexKey string = "invalid stored index key" + errNonExistingFieldForIndex string = "creating an index on a non-existing property" + errCollectionDoesntExisting string = "collection with given name doesn't exist" + errFailedToStoreIndexedField string = "failed to store indexed field" + errFailedToReadStoredIndexDesc string = "failed to read stored index description" + errCanNotDeleteIndexedField string = "can not delete indexed field" + errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" + errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" + errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" + errIndexWithNameDoesNotExists string = "index with name doesn't exists" + errCorruptedIndex string = "corrupted index. Please delete and recreate the index" + errInvalidFieldValue string = "invalid field value" + errUnsupportedIndexFieldType string = "unsupported index field type" + errIndexDescriptionHasNoFields string = "index description has no fields" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errCreateFile string = "failed to create file" + errRemoveFile string = "failed to remove file" + errOpenFile string = "failed to open file" + errCloseFile string = "failed to close file" + errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" + errFailedToReadByte string = "failed to read byte" + errFailedToWriteString string = "failed to write string" + errJSONDecode string = "failed to decode JSON" + errDocFromMap string = "failed to create a new doc from map" + errDocCreate string = "failed to save a new doc to collection" + errDocUpdate string = "failed to update doc to collection" + errExpectedJSONObject string = "expected JSON object" + errExpectedJSONArray string = "expected JSON array" + errOneOneAlreadyLinked string = "target document is already linked to another document" + errIndexDoesNotMatchName string = "the index used does not match the given name" + errCanNotIndexNonUniqueFields string = "can not index a doc's field(s) that violates unique index" + errInvalidViewQuery string = "the query provided is not valid as a View" + errCollectionAlreadyExists string = "collection already exists" + errMultipleActiveCollectionVersions string = "multiple versions of same collection cannot be active" + errCollectionSourcesCannotBeMutated string = "collection sources cannot be mutated" + errCollectionIndexesCannotBeMutated string = "collection indexes cannot be mutated" + errCollectionFieldsCannotBeMutated string = "collection fields cannot be mutated" + errCollectionRootIDCannotBeMutated string = "collection root ID cannot be mutated" + errCollectionSchemaVersionIDCannotBeMutated string = "collection schema version ID cannot be mutated" + errCollectionIDCannotBeZero string = "collection ID cannot be zero" + 
errCollectionsCannotBeDeleted string = "collections cannot be deleted" ) var ( - ErrFailedToGetCollection = errors.New(errFailedToGetCollection) - ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") - ErrInvalidFilter = errors.New("invalid filter") - ErrCollectionAlreadyExists = errors.New("collection already exists") - ErrCollectionNameEmpty = errors.New("collection name can't be empty") - ErrSchemaNameEmpty = errors.New("schema name can't be empty") - ErrSchemaRootEmpty = errors.New("schema root can't be empty") - ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") - ErrKeyEmpty = errors.New("key cannot be empty") - ErrCannotSetVersionID = errors.New(errCannotSetVersionID) - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) - ErrCorruptedIndex = errors.New(errCorruptedIndex) - ErrExpectedJSONObject = errors.New(errExpectedJSONObject) - ErrExpectedJSONArray = errors.New(errExpectedJSONArray) - ErrInvalidViewQuery = errors.New(errInvalidViewQuery) - ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields) + ErrFailedToGetCollection = errors.New(errFailedToGetCollection) + ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") + ErrInvalidFilter = errors.New("invalid filter") + ErrCollectionAlreadyExists = errors.New(errCollectionAlreadyExists) + ErrCollectionNameEmpty = errors.New("collection name can't be empty") + ErrSchemaNameEmpty = errors.New("schema name can't be empty") + ErrSchemaRootEmpty = errors.New("schema root can't be empty") + ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") + ErrKeyEmpty = errors.New("key cannot be empty") + ErrCannotSetVersionID = errors.New(errCannotSetVersionID) + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) + ErrCorruptedIndex = errors.New(errCorruptedIndex) + ErrExpectedJSONObject = errors.New(errExpectedJSONObject) + ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrInvalidViewQuery = errors.New(errInvalidViewQuery) + ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields) + ErrMultipleActiveCollectionVersions = errors.New(errMultipleActiveCollectionVersions) + ErrCollectionSourcesCannotBeMutated = errors.New(errCollectionSourcesCannotBeMutated) + ErrCollectionIndexesCannotBeMutated = errors.New(errCollectionIndexesCannotBeMutated) + ErrCollectionFieldsCannotBeMutated = errors.New(errCollectionFieldsCannotBeMutated) + ErrCollectionRootIDCannotBeMutated = errors.New(errCollectionRootIDCannotBeMutated) + ErrCollectionSchemaVersionIDCannotBeMutated = errors.New(errCollectionSchemaVersionIDCannotBeMutated) + ErrCollectionIDCannotBeZero = errors.New(errCollectionIDCannotBeZero) + ErrCollectionsCannotBeDeleted = errors.New(errCollectionsCannotBeDeleted) ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -208,6 +225,13 @@ func NewErrAddCollectionWithPatch(name string) error { ) } +func NewErrAddCollectionIDWithPatch(id uint32) error { + return errors.New( + errAddCollectionWithPatch, + errors.NewKV("ID", id), + ) +} + func NewErrCollectionIDDoesntMatch(name string, existingID, proposedID uint32) error { return errors.New( errCollectionIDDoesntMatch, @@ -543,3 +567,67 @@ func NewErrInvalidViewQueryMissingQuery() error { errors.NewKV("Reason", "No query provided"), ) } + +func NewErrCollectionAlreadyExists(name string) error { + 
return errors.New( + errCollectionAlreadyExists, + errors.NewKV("Name", name), + ) +} + +func NewErrCollectionIDAlreadyExists(id uint32) error { + return errors.New( + errCollectionAlreadyExists, + errors.NewKV("ID", id), + ) +} + +func NewErrMultipleActiveCollectionVersions(name string, root uint32) error { + return errors.New( + errMultipleActiveCollectionVersions, + errors.NewKV("Name", name), + errors.NewKV("Root", root), + ) +} + +func NewErrCollectionSourcesCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionSourcesCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionIndexesCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionIndexesCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionFieldsCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionFieldsCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionRootIDCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionRootIDCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionSchemaVersionIDCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionSchemaVersionIDCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + +func NewErrCollectionsCannotBeDeleted(colID uint32) error { + return errors.New( + errCollectionsCannotBeDeleted, + errors.NewKV("CollectionID", colID), + ) +} diff --git a/db/txn_db.go b/db/txn_db.go index f2fbe7cea3..09a7002033 100644 --- a/db/txn_db.go +++ b/db/txn_db.go @@ -267,6 +267,31 @@ func (db *explicitTxnDB) PatchSchema( return db.patchSchema(ctx, db.txn, patchString, migration, setAsDefaultVersion) } +func (db *implicitTxnDB) PatchCollection( + ctx context.Context, + patchString string, +) error { + txn, err := db.NewTxn(ctx, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.patchCollection(ctx, txn, patchString) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *explicitTxnDB) PatchCollection( + ctx context.Context, + patchString string, +) error { + return db.patchCollection(ctx, db.txn, patchString) +} + func (db *implicitTxnDB) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { txn, err := db.NewTxn(ctx, false) if err != nil { diff --git a/http/client.go b/http/client.go index 142a359c5b..33b9c21fb8 100644 --- a/http/client.go +++ b/http/client.go @@ -161,6 +161,25 @@ func (c *Client) PatchSchema( return err } +func (c *Client) PatchCollection( + ctx context.Context, + patch string, +) error { + methodURL := c.http.baseURL.JoinPath("collections") + + body, err := json.Marshal(patch) + if err != nil { + return err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + func (c *Client) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { methodURL := c.http.baseURL.JoinPath("schema", "default") diff --git a/http/handler_store.go b/http/handler_store.go index af82f0bc44..6077e6ea60 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -92,6 +92,24 @@ func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { rw.WriteHeader(http.StatusOK) } +func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var patch 
string + err := requestJSON(req, &patch) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + err = store.PatchCollection(req.Context(), patch) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) @@ -476,6 +494,17 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionDescribe.AddResponse(200, collectionsResponse) collectionDescribe.Responses.Set("400", errorResponse) + patchCollection := openapi3.NewOperation() + patchCollection.OperationID = "patch_collection" + patchCollection.Description = "Update collection definitions" + patchCollection.Tags = []string{"collection"} + patchCollection.RequestBody = &openapi3.RequestBodyRef{ + Value: openapi3.NewRequestBody().WithJSONSchema(openapi3.NewStringSchema()), + } + patchCollection.Responses = openapi3.NewResponses() + patchCollection.Responses.Set("200", successResponse) + patchCollection.Responses.Set("400", errorResponse) + collectionDefinitionsSchema := openapi3.NewArraySchema() collectionDefinitionsSchema.Items = collectionDefinitionSchema @@ -590,6 +619,7 @@ func (h *storeHandler) bindRoutes(router *Router) { router.AddRoute("/backup/export", http.MethodPost, backupExport, h.BasicExport) router.AddRoute("/backup/import", http.MethodPost, backupImport, h.BasicImport) router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection) + router.AddRoute("/collections", http.MethodPatch, patchCollection, h.PatchCollection) router.AddRoute("/view", http.MethodPost, views, h.AddView) router.AddRoute("/graphql", http.MethodGet, graphQLGet, h.ExecRequest) diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 89ba2cf3db..4c52a86abc 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -210,6 +210,16 @@ func (w *Wrapper) PatchSchema( return err } + +func (w *Wrapper) PatchCollection( + ctx context.Context, + patch string, +) error { + args := []string{"client", "collection", "patch"} + args = append(args, patch) + _, err := w.cmd.execute(ctx, args) + return err +} + func (w *Wrapper) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { args := []string{"client", "schema", "set-active"} args = append(args, schemaVersionID) diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index b45105a7f7..4de71c4f1f 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -106,6 +106,13 @@ func (w *Wrapper) PatchSchema( return w.client.PatchSchema(ctx, patch, migration, setAsDefaultVersion) } + +func (w *Wrapper) PatchCollection( + ctx context.Context, + patch string, +) error { + return w.client.PatchCollection(ctx, patch) +} + func (w *Wrapper) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { return w.client.SetActiveSchemaVersion(ctx, schemaVersionID) } diff --git a/tests/integration/collection_description/simple_test.go b/tests/integration/collection_description/simple_test.go new file mode 100644 index 0000000000..1070e8cd99 --- /dev/null +++ b/tests/integration/collection_description/simple_test.go @@ -0,0 +1,42 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file 
licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package collection_description + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrSimpleCreatesColGivenEmptyType(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.GetCollections{ + ExpectedResults: []client.CollectionDescription{ + { + ID: 1, + Name: immutable.Some("Users"), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/add/collections_test.go b/tests/integration/collection_description/updates/add/collections_test.go new file mode 100644 index 0000000000..9193b57dc6 --- /dev/null +++ b/tests/integration/collection_description/updates/add/collections_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package add + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateAddCollections_WithUndefinedID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"Name": "Dogs"} } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_WithZeroedID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"ID": 0, "Name": "Dogs"} } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/2", "value": {"ID": 2, "Name": "Dogs"} } + ] + `, + ExpectedError: "adding collections via patch is not supported. ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateAddCollections_WithNoIndex_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/-", "value": {"Name": "Dogs"} } + ] + `, + // We get this error because we are marshalling into a map[uint32]CollectionDescription, + // we will need to handle `-` when we allow adding collections via patches. 
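+ // (In JSON Patch, a trailing "-" path token refers to the end of an array, so it cannot name a key in this
+ // uint32-keyed map.)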
+ ExpectedError: "json: cannot unmarshal number - into Go value of type uint32", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/add/sources_test.go b/tests/integration/collection_description/updates/add/sources_test.go new file mode 100644 index 0000000000..37010aa15c --- /dev/null +++ b/tests/integration/collection_description/updates/add/sources_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package add + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateAddSources_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "add", "path": "/1/Sources/-", "value": {"SourceCollectionID": 1} } + ] + `, + ExpectedError: "collection sources cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/copy/name_test.go b/tests/integration/collection_description/updates/copy/name_test.go new file mode 100644 index 0000000000..b915d111ac --- /dev/null +++ b/tests/integration/collection_description/updates/copy/name_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package copy + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateCopyName_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "copy", "from": "/1/Name", "path": "/2/Name" } + ] + `, + ExpectedError: "collection already exists. 
Name: Users", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateCopyName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + // Activate the second collection by setting its name to that of the first, + // then decativate the original collection version by removing the name + Patch: ` + [ + { "op": "copy", "from": "/1/Name", "path": "/2/Name" }, + { "op": "remove", "path": "/1/Name" } + ] + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/move/name_test.go b/tests/integration/collection_description/updates/move/name_test.go new file mode 100644 index 0000000000..f493b03c1a --- /dev/null +++ b/tests/integration/collection_description/updates/move/name_test.go @@ -0,0 +1,66 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package move + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateMoveName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + // Make the second collection the active one by moving its name from the first to the second + Patch: ` + [ + { "op": "move", "from": "/1/Name", "path": "/2/Name" } + ] + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/collections_test.go b/tests/integration/collection_description/updates/remove/collections_test.go new file mode 100644 index 0000000000..b9363bde66 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/collections_test.go @@ -0,0 +1,41 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateRemoveCollections(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1" } + ] + `, + ExpectedError: `collections cannot be deleted. CollectionID: 1`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/remove/name_test.go b/tests/integration/collection_description/updates/remove/name_test.go new file mode 100644 index 0000000000..e352491cd7 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/name_test.go @@ -0,0 +1,49 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateRemoveName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Name" } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // The Users collection has been deactivated and is no longer accessible + ExpectedError: `Cannot query field "Users" on type "Query".`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/fields_test.go b/tests/integration/collection_description/updates/replace/fields_test.go new file mode 100644 index 0000000000..03aa8cdb1e --- /dev/null +++ b/tests/integration/collection_description/updates/replace/fields_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceFields_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Fields", "value": [{}] } + ] + `, + ExpectedError: "collection fields cannot be mutated. 
CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/id_test.go b/tests/integration/collection_description/updates/replace/id_test.go new file mode 100644 index 0000000000..b83c634385 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/id_test.go @@ -0,0 +1,146 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceID_WithZero_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 0 } + ] + `, + ExpectedError: "collection ID cannot be zero", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExisting_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 } + ] + `, + ExpectedError: "collection already exists. ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExistingSameRoot_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": "String"} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 }, + { "op": "replace", "path": "/2/ID", "value": 1 } + ] + `, + ExpectedError: "collection sources cannot be mutated.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithExistingDifferentRoot_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Dogs {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 }, + { "op": "replace", "path": "/2/ID", "value": 1 } + ] + `, + ExpectedError: "collection root ID cannot be mutated.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceID_WithNew_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/ID", "value": 2 } + ] + `, + ExpectedError: "adding collections via patch is not supported. 
ID: 2", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/indexes_test.go b/tests/integration/collection_description/updates/replace/indexes_test.go new file mode 100644 index 0000000000..9302d1f192 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/indexes_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceIndexes_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Indexes", "value": [{}] } + ] + `, + ExpectedError: "collection indexes cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/name_test.go b/tests/integration/collection_description/updates/replace/name_test.go new file mode 100644 index 0000000000..98f1ba8c98 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/name_test.go @@ -0,0 +1,210 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package replace
+
+import (
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/client"
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestColDescrUpdateReplaceName_GivenExistingName(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John"
+				}`,
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/1/Name", "value": "Actors" }
+					]
+				`,
+			},
+			testUtils.GetCollections{
+				ExpectedResults: []client.CollectionDescription{
+					{
+						ID:   1,
+						Name: immutable.Some("Actors"),
+					},
+				},
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+					}
+				}`,
+				ExpectedError: `Cannot query field "Users" on type "Query".`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Actors {
+						name
+					}
+				}`,
+				Results: []map[string]any{
+					{
+						"name": "John",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescrUpdateReplaceName_GivenInactiveCollectionWithSameName_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.SchemaPatch{
+				Patch: `
+					[
+						{ "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} }
+					]
+				`,
+				SetAsDefaultVersion: immutable.Some(false),
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/2/Name", "value": "Users" }
+					]
+				`,
+				ExpectedError: "collection already exists. Name: Users",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestColDescrUpdateReplaceName_GivenInactiveCollection_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+					}
+				`,
+			},
+			testUtils.SchemaPatch{
+				Patch: `
+					[
+						{ "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} }
+					]
+				`,
+				SetAsDefaultVersion: immutable.Some(false),
+			},
+			testUtils.PatchCollection{
+				Patch: `
+					[
+						{ "op": "replace", "path": "/2/Name", "value": "Actors" }
+					]
+				`,
+				// The params at the end of the error message are dependent on the order Go decides to iterate through
+				// a map and so are not included in the test.
+ ExpectedError: "multiple versions of same collection cannot be active", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceName_RemoveExistingName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "String"} } + ] + `, + SetAsDefaultVersion: immutable.Some(false), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Name" }, + { "op": "replace", "path": "/2/Name", "value": "Actors" } + ] + `, + }, + testUtils.GetCollections{ + FilterOptions: client.CollectionFetchOptions{ + IncludeInactive: immutable.Some(true), + }, + ExpectedResults: []client.CollectionDescription{ + { + ID: 1, + }, + { + ID: 2, + Name: immutable.Some("Actors"), + Sources: []any{ + &client.CollectionSource{ + SourceCollectionID: 1, + }, + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Actors { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/root_id_test.go b/tests/integration/collection_description/updates/replace/root_id_test.go new file mode 100644 index 0000000000..fee98f0664 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/root_id_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceRootID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/RootID", "value": 2 } + ] + `, + ExpectedError: "collection root ID cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/schema_version_id_test.go b/tests/integration/collection_description/updates/replace/schema_version_id_test.go new file mode 100644 index 0000000000..e4b1e7f42c --- /dev/null +++ b/tests/integration/collection_description/updates/replace/schema_version_id_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceSchemaVersionID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/SchemaVersionID", "value": "ghfdsas" } + ] + `, + ExpectedError: "collection schema version ID cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/sources_test.go b/tests/integration/collection_description/updates/replace/sources_test.go new file mode 100644 index 0000000000..2d06e01d4a --- /dev/null +++ b/tests/integration/collection_description/updates/replace/sources_test.go @@ -0,0 +1,39 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceSources_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Sources", "value": [{"SourceCollectionID": 1}] } + ] + `, + ExpectedError: "collection sources cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/test/name_test.go b/tests/integration/collection_description/updates/test/name_test.go new file mode 100644 index 0000000000..7baa13aca1 --- /dev/null +++ b/tests/integration/collection_description/updates/test/name_test.go @@ -0,0 +1,60 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateTestName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "test", "path": "/1/Name", "value": "Users" } + ] + `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateTestName_Fails(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "test", "path": "/1/Name", "value": "Dogs" } + ] + `, + ExpectedError: "testing value /1/Name failed: test failed", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index 95b19e1a59..f1f8c05411 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -34,7 +34,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17} } ] `, - ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. Field: foo, Kind: 17", + ExpectedError: "a schema name must be provided when adding a new relation field. Field: foo, Kind: 17", }, }, } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 525c41d658..794ce0a546 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -34,7 +34,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 16} } ] `, - ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. Field: foo, Kind: 16", + ExpectedError: "a schema name must be provided when adding a new relation field. Field: foo, Kind: 16", }, }, } diff --git a/tests/integration/schema/updates/index/simple_test.go b/tests/integration/schema/updates/add/field/with_index_test.go similarity index 88% rename from tests/integration/schema/updates/index/simple_test.go rename to tests/integration/schema/updates/add/field/with_index_test.go index fb506ec623..52815789f8 100644 --- a/tests/integration/schema/updates/index/simple_test.go +++ b/tests/integration/schema/updates/add/field/with_index_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package index
+package field
 
 import (
 	"testing"
@@ -16,7 +16,7 @@ import (
 	testUtils "github.com/sourcenetwork/defradb/tests/integration"
 )
 
-func TestPatching_ForCollectionWithIndex_StillWorks(t *testing.T) {
+func TestSchemaUpdatesAddFieldSimple_WithExistingIndex(t *testing.T) {
 	test := testUtils.TestCase{
 		Description: "Test patching schema for collection with index still works",
 		Actions: []any{
diff --git a/tests/integration/schema/updates/add/simple_test.go b/tests/integration/schema/updates/add/simple_test.go
index 0eac29b49a..88d36680b0 100644
--- a/tests/integration/schema/updates/add/simple_test.go
+++ b/tests/integration/schema/updates/add/simple_test.go
@@ -33,7 +33,7 @@ func TestSchemaUpdatesAddSimpleErrorsAddingSchema(t *testing.T) {
 						{ "op": "add", "path": "/-", "value": {"Name": "books"} }
 					]
 				`,
-				ExpectedError: "unknown collection, adding collections via patch is not supported. Name: books",
+				ExpectedError: "adding collections via patch is not supported. Name: books",
 			},
 			testUtils.Request{
 				Request: `query {
diff --git a/tests/integration/schema/updates/copy/simple_test.go b/tests/integration/schema/updates/copy/simple_test.go
index 206cd49b52..cdda8abaf8 100644
--- a/tests/integration/schema/updates/copy/simple_test.go
+++ b/tests/integration/schema/updates/copy/simple_test.go
@@ -38,7 +38,7 @@ func TestSchemaUpdatesCopyCollectionWithRemoveIDAndReplaceName(t *testing.T) {
 						{ "op": "replace", "path": "/Book/Name", "value": "Book" }
 					]
 				`,
-				ExpectedError: "unknown collection, adding collections via patch is not supported. Name: Book",
+				ExpectedError: "adding collections via patch is not supported. Name: Book",
 			},
 		},
 	}
diff --git a/tests/integration/schema/updates/replace/simple_test.go b/tests/integration/schema/updates/replace/simple_test.go
index 7729a274c9..722ff36f9b 100644
--- a/tests/integration/schema/updates/replace/simple_test.go
+++ b/tests/integration/schema/updates/replace/simple_test.go
@@ -44,7 +44,7 @@ func TestSchemaUpdatesReplaceCollectionErrors(t *testing.T) {
 				// WARNING: An error is still expected if/when we allow the adding of collections, as this also
 				// implies that the "Users" collection is to be deleted.  Only once we support the adding *and*
 				// removal of collections should this not error.
-				ExpectedError: "unknown collection, adding collections via patch is not supported. Name: Book",
+				ExpectedError: "adding collections via patch is not supported. Name: Book",
 			},
 		},
 	}
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index ce6e456fbb..7cda289319 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -97,6 +97,18 @@ type SchemaPatch struct {
 	ExpectedError string
}
 
+// PatchCollection is an action that will apply the given patch to the collection descriptions.
+type PatchCollection struct {
+	// NodeID may hold the ID (index) of a node to apply this patch to.
+	//
+	// If a value is not provided the patch will be applied to all nodes.
+	NodeID immutable.Option[int]
+
+	// The Patch to apply to the collection description.
+	Patch string
+
+	ExpectedError string
+}
+
 // GetSchema is an action that fetches schema using the provided options.
 type GetSchema struct {
 	// NodeID may hold the ID (index) of a node to apply this patch to.
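For illustration, here is a minimal sketch of how the new `PatchCollection` action composes with the `NodeID` option described above. This is not a test from this patch; the package name, node index, and collection name are assumed:

```go
package field_test // hypothetical package for this sketch

import (
	"testing"

	"github.com/sourcenetwork/immutable"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

// TestPatchCollectionOnSingleNode is a sketch, not part of this patch:
// it renames the collection on node 0 only, leaving other nodes untouched.
func TestPatchCollectionOnSingleNode(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.SchemaUpdate{
				Schema: `
					type Users {
						name: String
					}
				`,
			},
			testUtils.PatchCollection{
				// Only node 0 receives the patch; omit NodeID to target all nodes.
				NodeID: immutable.Some(0),
				// Patch paths are rooted at the collection ID: "/1/Name"
				// addresses the Name of the collection with ID 1.
				Patch: `
					[
						{ "op": "replace", "path": "/1/Name", "value": "Actors" }
					]
				`,
			},
		},
	}

	testUtils.ExecuteTestCase(t, test)
}
```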
diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go
index d5cdcbd01d..40b2c81d86 100644
--- a/tests/integration/utils2.go
+++ b/tests/integration/utils2.go
@@ -260,6 +260,9 @@ func performAction(
 	case SchemaPatch:
 		patchSchema(s, action)
 
+	case PatchCollection:
+		patchCollection(s, action)
+
 	case GetSchema:
 		getSchema(s, action)
 
@@ -1005,6 +1008,22 @@ func patchSchema(
 	refreshIndexes(s)
 }
 
+func patchCollection(
+	s *state,
+	action PatchCollection,
+) {
+	for _, node := range getNodes(action.NodeID, s.nodes) {
+		err := node.PatchCollection(s.ctx, action.Patch)
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	}
+
+	// If the collection was updated we need to refresh the collection definitions.
+	refreshCollections(s)
+	refreshIndexes(s)
+}
+
 func getSchema(
 	s *state,
 	action GetSchema,
From c74a6f7c2201952fd831f6438d7a0600c3ae8106 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Fri, 15 Mar 2024 15:28:16 -0400
Subject: [PATCH 03/49] refactor(i): Break up client/descriptions.go (#2412)

## Relevant issue(s)

Resolves #2411

## Description

Breaks up client/descriptions.go into multiple files.
---
 ...criptions.go => collection_description.go} | 298 +++++----------------
 client/collection_field_description.go        |  29 ++
 client/schema_description.go                  |  49 +++
 client/schema_field_description.go            | 171 ++++++++++
 4 files changed, 288 insertions(+), 259 deletions(-)
 rename client/{descriptions.go => collection_description.go} (52%)
 create mode 100644 client/collection_field_description.go
 create mode 100644 client/schema_description.go
 create mode 100644 client/schema_field_description.go

diff --git a/client/descriptions.go b/client/collection_description.go
similarity index 52%
rename from client/descriptions.go
rename to client/collection_description.go
index dd12e9cf00..2e3e10aa36 100644
--- a/client/descriptions.go
+++ b/client/collection_description.go
@@ -1,4 +1,4 @@
-// Copyright 2022 Democratized Data Foundation
+// Copyright 2024 Democratized Data Foundation
 //
 // Use of this software is governed by the Business Source License
 // included in the file licenses/BSL.txt.
@@ -67,6 +67,44 @@ type CollectionDescription struct {
 	Indexes []IndexDescription
 }
 
+// QuerySource represents a collection data source from a query.
+//
+// The query will be executed when data from this source is requested, and the query results
+// yielded to the consumer.
+type QuerySource struct {
+	// Query contains the base query of this data source.
+	Query request.Select
+
+	// Transform is an optional Lens configuration. If specified, data drawn from the [Query] will have the
+	// transform applied before being returned.
+	//
+	// The transform is not limited to just transforming the input documents, it may also yield new ones, or filter out
+	// those passed in from the underlying query.
+	Transform immutable.Option[model.Lens]
+}
+
+// CollectionSource represents a collection data source from another collection instance.
+//
+// Data against all collection instances in a CollectionSource chain will be returned as-if
+// from the same dataset when queried. Lens transforms may be applied between instances.
+//
+// Typically these are used to link together multiple schema versions into the same dataset.
+type CollectionSource struct {
+	// SourceCollectionID is the local identifier of the source [CollectionDescription] from which to
+	// share data.
+	//
+	// This is a bi-directional relationship, and documents in the host collection instance will also
+	// be available to the source collection instance.
+	SourceCollectionID uint32
+
+	// Transform is an optional Lens configuration. If specified, data drawn from the source will have the
+	// transform applied before being returned by any operation on the host collection instance.
+	//
+	// If the transform supports an inverse operation, that inverse will be applied when the source collection
+	// draws data from this host.
+	Transform immutable.Option[model.Lens]
+}
+
 // IDString returns the collection ID as a string.
 func (col CollectionDescription) IDString() string {
 	return fmt.Sprint(col.ID)
@@ -83,17 +121,6 @@ func (col CollectionDescription) GetFieldByName(fieldName string) (CollectionFie
 	return CollectionFieldDescription{}, false
 }
 
-// GetFieldByName returns the field for the given field name. If such a field is found it
-// will return it and true, if it is not found it will return false.
-func (s SchemaDescription) GetFieldByName(fieldName string) (SchemaFieldDescription, bool) {
-	for _, field := range s.Fields {
-		if field.Name == fieldName {
-			return field, true
-		}
-	}
-	return SchemaFieldDescription{}, false
-}
-
 // GetFieldByRelation returns the field that supports the relation of the given name.
 func (col CollectionDescription) GetFieldByRelation(
 	relationName string,
@@ -131,253 +158,6 @@ func sourcesOfType[ResultType any](col CollectionDescription) []ResultType {
 	return result
 }
 
-// QuerySource represents a collection data source from a query.
-//
-// The query will be executed when data from this source is requested, and the query results
-// yielded to the consumer.
-type QuerySource struct {
-	// Query contains the base query of this data source.
-	Query request.Select
-
-	// Transform is a optional Lens configuration. If specified, data drawn from the [Query] will have the
-	// transform applied before being returned.
-	//
-	// The transform is not limited to just transforming the input documents, it may also yield new ones, or filter out
-	// those passed in from the underlying query.
-	Transform immutable.Option[model.Lens]
-}
-
-// CollectionSource represents a collection data source from another collection instance.
-//
-// Data against all collection instances in a CollectionSource chain will be returned as-if
-// from the same dataset when queried. Lens transforms may be applied between instances.
-//
-// Typically these are used to link together multiple schema versions into the same dataset.
-type CollectionSource struct {
-	// SourceCollectionID is the local identifier of the source [CollectionDescription] from which to
-	// share data.
-	//
-	// This is a bi-directional relationship, and documents in the host collection instance will also
-	// be available to the source collection instance.
-	SourceCollectionID uint32
-
-	// Transform is a optional Lens configuration. If specified, data drawn from the source will have the
-	// transform applied before being returned by any operation on the host collection instance.
-	//
-	// If the transform supports an inverse operation, that inverse will be applied when the source collection
-	// draws data from this host.
-	Transform immutable.Option[model.Lens]
-}
-
-// SchemaDescription describes a Schema and its associated metadata.
-type SchemaDescription struct {
-	// Root is the version agnostic identifier for this schema.
-	//
-	// It remains constant throughout the lifetime of this schema.
- Root string - - // VersionID is the version-specific identifier for this schema. - // - // It is generated on mutation of this schema and can be used to uniquely - // identify a schema at a specific version. - VersionID string - - // Name is the name of this Schema. - // - // It is currently used to define the Collection Name, and as such these two properties - // will currently share the same name. - // - // It is immutable. - Name string - - // Fields contains the fields within this Schema. - // - // Currently new fields may be added after initial declaration, but they cannot be removed. - Fields []SchemaFieldDescription -} - -// FieldKind describes the type of a field. -type FieldKind uint8 - -func (f FieldKind) String() string { - switch f { - case FieldKind_DocID: - return "ID" - case FieldKind_NILLABLE_BOOL: - return "Boolean" - case FieldKind_NILLABLE_BOOL_ARRAY: - return "[Boolean]" - case FieldKind_BOOL_ARRAY: - return "[Boolean!]" - case FieldKind_NILLABLE_INT: - return "Int" - case FieldKind_NILLABLE_INT_ARRAY: - return "[Int]" - case FieldKind_INT_ARRAY: - return "[Int!]" - case FieldKind_NILLABLE_DATETIME: - return "DateTime" - case FieldKind_NILLABLE_FLOAT: - return "Float" - case FieldKind_NILLABLE_FLOAT_ARRAY: - return "[Float]" - case FieldKind_FLOAT_ARRAY: - return "[Float!]" - case FieldKind_NILLABLE_STRING: - return "String" - case FieldKind_NILLABLE_STRING_ARRAY: - return "[String]" - case FieldKind_STRING_ARRAY: - return "[String!]" - case FieldKind_NILLABLE_BLOB: - return "Blob" - case FieldKind_NILLABLE_JSON: - return "JSON" - default: - return fmt.Sprint(uint8(f)) - } -} - -// IsObject returns true if this FieldKind is an object type. -func (f FieldKind) IsObject() bool { - return f == FieldKind_FOREIGN_OBJECT || - f == FieldKind_FOREIGN_OBJECT_ARRAY -} - -// IsObjectArray returns true if this FieldKind is an object array type. -func (f FieldKind) IsObjectArray() bool { - return f == FieldKind_FOREIGN_OBJECT_ARRAY -} - -// IsArray returns true if this FieldKind is an array type which includes inline arrays as well -// as relation arrays. -func (f FieldKind) IsArray() bool { - return f == FieldKind_BOOL_ARRAY || - f == FieldKind_INT_ARRAY || - f == FieldKind_FLOAT_ARRAY || - f == FieldKind_STRING_ARRAY || - f == FieldKind_FOREIGN_OBJECT_ARRAY || - f == FieldKind_NILLABLE_BOOL_ARRAY || - f == FieldKind_NILLABLE_INT_ARRAY || - f == FieldKind_NILLABLE_FLOAT_ARRAY || - f == FieldKind_NILLABLE_STRING_ARRAY -} - -// Note: These values are serialized and persisted in the database, avoid modifying existing values. 
-const ( - FieldKind_None FieldKind = 0 - FieldKind_DocID FieldKind = 1 - FieldKind_NILLABLE_BOOL FieldKind = 2 - FieldKind_BOOL_ARRAY FieldKind = 3 - FieldKind_NILLABLE_INT FieldKind = 4 - FieldKind_INT_ARRAY FieldKind = 5 - FieldKind_NILLABLE_FLOAT FieldKind = 6 - FieldKind_FLOAT_ARRAY FieldKind = 7 - _ FieldKind = 8 // safe to repurpose (was never used) - _ FieldKind = 9 // safe to repurpose (previously old field) - FieldKind_NILLABLE_DATETIME FieldKind = 10 - FieldKind_NILLABLE_STRING FieldKind = 11 - FieldKind_STRING_ARRAY FieldKind = 12 - FieldKind_NILLABLE_BLOB FieldKind = 13 - FieldKind_NILLABLE_JSON FieldKind = 14 - _ FieldKind = 15 // safe to repurpose (was never used) - - // Embedded object, but accessed via foreign keys - FieldKind_FOREIGN_OBJECT FieldKind = 16 - - // Array of embedded objects, accessed via foreign keys - FieldKind_FOREIGN_OBJECT_ARRAY FieldKind = 17 - - FieldKind_NILLABLE_BOOL_ARRAY FieldKind = 18 - FieldKind_NILLABLE_INT_ARRAY FieldKind = 19 - FieldKind_NILLABLE_FLOAT_ARRAY FieldKind = 20 - FieldKind_NILLABLE_STRING_ARRAY FieldKind = 21 -) - -// FieldKindStringToEnumMapping maps string representations of [FieldKind] values to -// their enum values. -// -// It is currently used to by [db.PatchSchema] to allow string representations of -// [FieldKind] to be provided instead of their raw int values. This usage may expand -// in the future. They currently roughly correspond to the GQL field types, but this -// equality is not guaranteed. -var FieldKindStringToEnumMapping = map[string]FieldKind{ - "ID": FieldKind_DocID, - "Boolean": FieldKind_NILLABLE_BOOL, - "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, - "[Boolean!]": FieldKind_BOOL_ARRAY, - "Int": FieldKind_NILLABLE_INT, - "[Int]": FieldKind_NILLABLE_INT_ARRAY, - "[Int!]": FieldKind_INT_ARRAY, - "DateTime": FieldKind_NILLABLE_DATETIME, - "Float": FieldKind_NILLABLE_FLOAT, - "[Float]": FieldKind_NILLABLE_FLOAT_ARRAY, - "[Float!]": FieldKind_FLOAT_ARRAY, - "String": FieldKind_NILLABLE_STRING, - "[String]": FieldKind_NILLABLE_STRING_ARRAY, - "[String!]": FieldKind_STRING_ARRAY, - "Blob": FieldKind_NILLABLE_BLOB, - "JSON": FieldKind_NILLABLE_JSON, -} - -// RelationType describes the type of relation between two types. -type RelationType uint8 - -// FieldID is a unique identifier for a field in a schema. -type FieldID uint32 - -func (f FieldID) String() string { - return fmt.Sprint(uint32(f)) -} - -// SchemaFieldDescription describes a field on a Schema and its associated metadata. -type SchemaFieldDescription struct { - // Name contains the name of this field. - // - // It is currently immutable. - Name string - - // The data type that this field holds. - // - // Must contain a valid value. It is currently immutable. - Kind FieldKind - - // Schema contains the schema name of the type this field contains if this field is - // a relation field. Otherwise this will be empty. - Schema string - - // RelationName the name of the relationship that this field represents if this field is - // a relation field. Otherwise this will be empty. - RelationName string - - // The CRDT Type of this field. If no type has been provided it will default to [LWW_REGISTER]. - // - // It is currently immutable. - Typ CType - - // If true, this is the primary half of a relation, otherwise is false. - IsPrimaryRelation bool -} - -// CollectionFieldDescription describes the local components of a field on a collection. -type CollectionFieldDescription struct { - // Name contains the name of the [SchemaFieldDescription] that this field uses. 
- Name string - - // ID contains the local, internal ID of this field. - ID FieldID -} - -// IsRelation returns true if this field is a relation. -func (f SchemaFieldDescription) IsRelation() bool { - return f.RelationName != "" -} - -// IsSet returns true if the target relation type is set. -func (m RelationType) IsSet(target RelationType) bool { - return m&target > 0 -} - // collectionDescription is a private type used to facilitate the unmarshalling // of json to a [CollectionDescription]. type collectionDescription struct { diff --git a/client/collection_field_description.go b/client/collection_field_description.go new file mode 100644 index 0000000000..048cde24c0 --- /dev/null +++ b/client/collection_field_description.go @@ -0,0 +1,29 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import "fmt" + +// FieldID is a unique identifier for a field in a schema. +type FieldID uint32 + +// CollectionFieldDescription describes the local components of a field on a collection. +type CollectionFieldDescription struct { + // Name contains the name of the [SchemaFieldDescription] that this field uses. + Name string + + // ID contains the local, internal ID of this field. + ID FieldID +} + +func (f FieldID) String() string { + return fmt.Sprint(uint32(f)) +} diff --git a/client/schema_description.go b/client/schema_description.go new file mode 100644 index 0000000000..302fadf5e7 --- /dev/null +++ b/client/schema_description.go @@ -0,0 +1,49 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +// SchemaDescription describes a Schema and its associated metadata. +type SchemaDescription struct { + // Root is the version agnostic identifier for this schema. + // + // It remains constant throughout the lifetime of this schema. + Root string + + // VersionID is the version-specific identifier for this schema. + // + // It is generated on mutation of this schema and can be used to uniquely + // identify a schema at a specific version. + VersionID string + + // Name is the name of this Schema. + // + // It is currently used to define the Collection Name, and as such these two properties + // will currently share the same name. + // + // It is immutable. + Name string + + // Fields contains the fields within this Schema. + // + // Currently new fields may be added after initial declaration, but they cannot be removed. + Fields []SchemaFieldDescription +} + +// GetFieldByName returns the field for the given field name. If such a field is found it +// will return it and true, if it is not found it will return false. 
+func (s SchemaDescription) GetFieldByName(fieldName string) (SchemaFieldDescription, bool) {
+	for _, field := range s.Fields {
+		if field.Name == fieldName {
+			return field, true
+		}
+	}
+	return SchemaFieldDescription{}, false
+}
diff --git a/client/schema_field_description.go b/client/schema_field_description.go
new file mode 100644
index 0000000000..05b7f99a5e
--- /dev/null
+++ b/client/schema_field_description.go
@@ -0,0 +1,171 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import "fmt"
+
+// FieldKind describes the type of a field.
+type FieldKind uint8
+
+// SchemaFieldDescription describes a field on a Schema and its associated metadata.
+type SchemaFieldDescription struct {
+	// Name contains the name of this field.
+	//
+	// It is currently immutable.
+	Name string
+
+	// The data type that this field holds.
+	//
+	// Must contain a valid value. It is currently immutable.
+	Kind FieldKind
+
+	// Schema contains the schema name of the type this field contains if this field is
+	// a relation field. Otherwise this will be empty.
+	Schema string
+
+	// RelationName is the name of the relationship that this field represents if this field is
+	// a relation field. Otherwise this will be empty.
+	RelationName string
+
+	// The CRDT Type of this field. If no type has been provided it will default to [LWW_REGISTER].
+	//
+	// It is currently immutable.
+	Typ CType
+
+	// If true, this is the primary half of a relation, otherwise is false.
+	IsPrimaryRelation bool
+}
+
+func (f FieldKind) String() string {
+	switch f {
+	case FieldKind_DocID:
+		return "ID"
+	case FieldKind_NILLABLE_BOOL:
+		return "Boolean"
+	case FieldKind_NILLABLE_BOOL_ARRAY:
+		return "[Boolean]"
+	case FieldKind_BOOL_ARRAY:
+		return "[Boolean!]"
+	case FieldKind_NILLABLE_INT:
+		return "Int"
+	case FieldKind_NILLABLE_INT_ARRAY:
+		return "[Int]"
+	case FieldKind_INT_ARRAY:
+		return "[Int!]"
+	case FieldKind_NILLABLE_DATETIME:
+		return "DateTime"
+	case FieldKind_NILLABLE_FLOAT:
+		return "Float"
+	case FieldKind_NILLABLE_FLOAT_ARRAY:
+		return "[Float]"
+	case FieldKind_FLOAT_ARRAY:
+		return "[Float!]"
+	case FieldKind_NILLABLE_STRING:
+		return "String"
+	case FieldKind_NILLABLE_STRING_ARRAY:
+		return "[String]"
+	case FieldKind_STRING_ARRAY:
+		return "[String!]"
+	case FieldKind_NILLABLE_BLOB:
+		return "Blob"
+	case FieldKind_NILLABLE_JSON:
+		return "JSON"
+	default:
+		return fmt.Sprint(uint8(f))
+	}
+}
+
+// IsObject returns true if this FieldKind is an object type.
+func (f FieldKind) IsObject() bool {
+	return f == FieldKind_FOREIGN_OBJECT ||
+		f == FieldKind_FOREIGN_OBJECT_ARRAY
+}
+
+// IsObjectArray returns true if this FieldKind is an object array type.
+func (f FieldKind) IsObjectArray() bool {
+	return f == FieldKind_FOREIGN_OBJECT_ARRAY
+}
+
+// IsArray returns true if this FieldKind is an array type which includes inline arrays as well
+// as relation arrays.
+func (f FieldKind) IsArray() bool {
+	return f == FieldKind_BOOL_ARRAY ||
+		f == FieldKind_INT_ARRAY ||
+		f == FieldKind_FLOAT_ARRAY ||
+		f == FieldKind_STRING_ARRAY ||
+		f == FieldKind_FOREIGN_OBJECT_ARRAY ||
+		f == FieldKind_NILLABLE_BOOL_ARRAY ||
+		f == FieldKind_NILLABLE_INT_ARRAY ||
+		f == FieldKind_NILLABLE_FLOAT_ARRAY ||
+		f == FieldKind_NILLABLE_STRING_ARRAY
+}
+
+// Note: These values are serialized and persisted in the database, avoid modifying existing values.
+const (
+	FieldKind_None              FieldKind = 0
+	FieldKind_DocID             FieldKind = 1
+	FieldKind_NILLABLE_BOOL     FieldKind = 2
+	FieldKind_BOOL_ARRAY        FieldKind = 3
+	FieldKind_NILLABLE_INT      FieldKind = 4
+	FieldKind_INT_ARRAY         FieldKind = 5
+	FieldKind_NILLABLE_FLOAT    FieldKind = 6
+	FieldKind_FLOAT_ARRAY       FieldKind = 7
+	_                           FieldKind = 8 // safe to repurpose (was never used)
+	_                           FieldKind = 9 // safe to repurpose (previously old field)
+	FieldKind_NILLABLE_DATETIME FieldKind = 10
+	FieldKind_NILLABLE_STRING   FieldKind = 11
+	FieldKind_STRING_ARRAY      FieldKind = 12
+	FieldKind_NILLABLE_BLOB     FieldKind = 13
+	FieldKind_NILLABLE_JSON     FieldKind = 14
+	_                           FieldKind = 15 // safe to repurpose (was never used)
+
+	// Embedded object, but accessed via foreign keys
+	FieldKind_FOREIGN_OBJECT FieldKind = 16
+
+	// Array of embedded objects, accessed via foreign keys
+	FieldKind_FOREIGN_OBJECT_ARRAY FieldKind = 17
+
+	FieldKind_NILLABLE_BOOL_ARRAY   FieldKind = 18
+	FieldKind_NILLABLE_INT_ARRAY    FieldKind = 19
+	FieldKind_NILLABLE_FLOAT_ARRAY  FieldKind = 20
+	FieldKind_NILLABLE_STRING_ARRAY FieldKind = 21
+)
+
+// FieldKindStringToEnumMapping maps string representations of [FieldKind] values to
+// their enum values.
+//
+// It is currently used by [db.PatchSchema] to allow string representations of
+// [FieldKind] to be provided instead of their raw int values. This usage may expand
+// in the future. They currently roughly correspond to the GQL field types, but this
+// equality is not guaranteed.
+var FieldKindStringToEnumMapping = map[string]FieldKind{
+	"ID":         FieldKind_DocID,
+	"Boolean":    FieldKind_NILLABLE_BOOL,
+	"[Boolean]":  FieldKind_NILLABLE_BOOL_ARRAY,
+	"[Boolean!]": FieldKind_BOOL_ARRAY,
+	"Int":        FieldKind_NILLABLE_INT,
+	"[Int]":      FieldKind_NILLABLE_INT_ARRAY,
+	"[Int!]":     FieldKind_INT_ARRAY,
+	"DateTime":   FieldKind_NILLABLE_DATETIME,
+	"Float":      FieldKind_NILLABLE_FLOAT,
+	"[Float]":    FieldKind_NILLABLE_FLOAT_ARRAY,
+	"[Float!]":   FieldKind_FLOAT_ARRAY,
+	"String":     FieldKind_NILLABLE_STRING,
+	"[String]":   FieldKind_NILLABLE_STRING_ARRAY,
+	"[String!]":  FieldKind_STRING_ARRAY,
+	"Blob":       FieldKind_NILLABLE_BLOB,
+	"JSON":       FieldKind_NILLABLE_JSON,
+}
+
+// IsRelation returns true if this field is a relation.
+func (f SchemaFieldDescription) IsRelation() bool {
+	return f.RelationName != ""
+}
From d1b1a76d232d1b412f8f1e1e0410ae8d33417c81 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Tue, 19 Mar 2024 10:08:11 -0700
Subject: [PATCH 04/49] refactor: Replace logging package with corelog (#2406)

## Relevant issue(s)

Resolves #2405

## Description

This PR replaces the logging package with the `corelog` library. Please review along with https://github.com/sourcenetwork/corelog/pull/2

`corelog` utilizes the `log/slog` standard library package to produce structured logs. The interface is kept mostly the same with some modifications:

- `Debug` and `Fatal` levels have been removed
- `context` for logging calls has been moved to a context-specific variant. For example, `Info` has an `InfoContext` variant.
- `Feedback` methods have been removed in favor of restricting the output to `stderr` and `stdout`. - Supported output formats are `text` or `json`. `csv` is not supported by `slog` - Support for colored logs is not yet implemented, but can be easily added in a future PR. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Manually tested Specify the platform(s) on which this was tested: - MacOS --- cli/cli.go | 4 +- cli/config.go | 67 +- cli/config_test.go | 8 +- cli/root.go | 24 +- cli/server_dump.go | 2 +- cli/start.go | 8 +- datastore/blockstore.go | 5 +- datastore/store.go | 5 +- db/collection.go | 2 +- db/collection_delete.go | 2 +- db/collection_update.go | 2 +- db/db.go | 17 +- db/subscriptions.go | 2 +- docs/config.md | 26 +- go.mod | 5 +- go.sum | 2 + http/logger.go | 17 +- logging/config.go | 264 ------- logging/doc.go | 26 - logging/logger.go | 311 -------- logging/logging.go | 77 -- logging/logging_test.go | 1011 -------------------------- logging/registry.go | 57 -- merkle/clock/clock.go | 16 +- merkle/clock/heads.go | 12 +- merkle/crdt/composite.go | 2 - merkle/crdt/merklecrdt.go | 5 - net/client.go | 15 - net/dag.go | 22 +- net/dag_test.go | 1 - net/net.go | 6 +- net/node.go | 22 +- net/peer.go | 97 ++- net/process.go | 29 +- net/server.go | 58 +- node/node.go | 8 +- tests/bench/bench_util.go | 10 +- tests/gen/cli/util_test.go | 8 +- tests/integration/explain.go | 4 +- tests/integration/net/order/utils.go | 34 +- tests/integration/p2p.go | 4 +- tests/integration/utils2.go | 28 +- 42 files changed, 246 insertions(+), 2079 deletions(-) delete mode 100644 logging/config.go delete mode 100644 logging/doc.go delete mode 100644 logging/logger.go delete mode 100644 logging/logging.go delete mode 100644 logging/logging_test.go delete mode 100644 logging/registry.go diff --git a/cli/cli.go b/cli/cli.go index c40c6528d8..33f58ac8e9 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -16,10 +16,10 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) -var log = logging.MustNewLogger("cli") +var log = corelog.NewLogger("cli") // NewDefraCommand returns the root command instanciated with its tree of subcommands. func NewDefraCommand() *cobra.Command { diff --git a/cli/config.go b/cli/config.go index bb57a8cb3d..54d7529121 100644 --- a/cli/config.go +++ b/cli/config.go @@ -15,10 +15,9 @@ import ( "path/filepath" "strings" + "github.com/sourcenetwork/corelog" "github.com/spf13/pflag" "github.com/spf13/viper" - - "github.com/sourcenetwork/defradb/logging" ) const ( @@ -41,11 +40,12 @@ var configPaths = []string{ // configFlags is a mapping of config keys to cli flags to bind to. 
var configFlags = map[string]string{ - "log.level": "loglevel", - "log.output": "logoutput", - "log.format": "logformat", - "log.stacktrace": "logtrace", - "log.nocolor": "lognocolor", + "log.level": "log-level", + "log.output": "log-output", + "log.format": "log-format", + "log.stacktrace": "log-stacktrace", + "log.source": "log-source", + "log.overrides": "log-overrides", "api.address": "url", "datastore.maxtxnretries": "max-txn-retries", "datastore.store": "store", @@ -125,14 +125,17 @@ func loadConfig(rootdir string, flags *pflag.FlagSet) (*viper.Viper, error) { } } - logCfg := loggingConfig(cfg.Sub("log")) - logCfg.OverridesByLoggerName = make(map[string]logging.Config) + // set default logging config + corelog.SetConfig(corelog.Config{ + Level: cfg.GetString("log.level"), + Format: cfg.GetString("log.format"), + Output: cfg.GetString("log.output"), + EnableStackTrace: cfg.GetBool("log.stacktrace"), + EnableSource: cfg.GetBool("log.source"), + }) - // apply named logging overrides - for key := range cfg.GetStringMap("log.overrides") { - logCfg.OverridesByLoggerName[key] = loggingConfig(cfg.Sub("log.overrides." + key)) - } - logging.SetConfig(logCfg) + // set logging config overrides + corelog.SetConfigOverrides(cfg.GetString("log.overrides")) return cfg, nil } @@ -147,39 +150,3 @@ func bindConfigFlags(cfg *viper.Viper, flags *pflag.FlagSet) error { } return nil } - -// loggingConfig returns a new logging config from the given config. -func loggingConfig(cfg *viper.Viper) logging.Config { - var level int8 - switch value := cfg.GetString("level"); value { - case configLogLevelDebug: - level = logging.Debug - case configLogLevelInfo: - level = logging.Info - case configLogLevelError: - level = logging.Error - case configLogLevelFatal: - level = logging.Fatal - default: - level = logging.Info - } - - var format logging.EncoderFormat - switch value := cfg.GetString("format"); value { - case configLogFormatJSON: - format = logging.JSON - case configLogFormatCSV: - format = logging.CSV - default: - format = logging.CSV - } - - return logging.Config{ - Level: logging.NewLogLevelOption(level), - EnableStackTrace: logging.NewEnableStackTraceOption(cfg.GetBool("stacktrace")), - DisableColor: logging.NewDisableColorOption(cfg.GetBool("nocolor")), - EncoderFormat: logging.NewEncoderFormatOption(format), - OutputPaths: []string{cfg.GetString("output")}, - EnableCaller: logging.NewEnableCallerOption(cfg.GetBool("caller")), - } -} diff --git a/cli/config_test.go b/cli/config_test.go index 210743477c..492774398c 100644 --- a/cli/config_test.go +++ b/cli/config_test.go @@ -53,9 +53,9 @@ func TestLoadConfigNotExist(t *testing.T) { assert.Equal(t, []string{}, cfg.GetStringSlice("net.peers")) assert.Equal(t, "info", cfg.GetString("log.level")) - assert.Equal(t, false, cfg.GetBool("log.stacktrace")) - assert.Equal(t, "csv", cfg.GetString("log.format")) assert.Equal(t, "stderr", cfg.GetString("log.output")) - assert.Equal(t, false, cfg.GetBool("log.nocolor")) - assert.Equal(t, false, cfg.GetBool("log.caller")) + assert.Equal(t, "text", cfg.GetString("log.format")) + assert.Equal(t, false, cfg.GetBool("log.stacktrace")) + assert.Equal(t, false, cfg.GetBool("log.source")) + assert.Equal(t, "", cfg.GetString("log.overrides")) } diff --git a/cli/root.go b/cli/root.go index e4ba349f76..6ba7af1f1c 100644 --- a/cli/root.go +++ b/cli/root.go @@ -38,33 +38,39 @@ Start a DefraDB node, interact with a local or remote node, and much more. 
 	)
 
 	cmd.PersistentFlags().String(
-		"loglevel",
+		"log-level",
 		"info",
 		"Log level to use. Options are debug, info, error, fatal",
 	)
 
 	cmd.PersistentFlags().String(
-		"logoutput",
+		"log-output",
 		"stderr",
-		"Log output path",
+		"Log output path. Options are stderr or stdout.",
 	)
 
 	cmd.PersistentFlags().String(
-		"logformat",
+		"log-format",
-		"csv",
+		"text",
-		"Log format to use. Options are csv, json",
+		"Log format to use. Options are text or json",
 	)
 
 	cmd.PersistentFlags().Bool(
-		"logtrace",
+		"log-stacktrace",
 		false,
 		"Include stacktrace in error and fatal logs",
 	)
 
 	cmd.PersistentFlags().Bool(
-		"lognocolor",
+		"log-source",
 		false,
-		"Disable colored log output",
+		"Include source location in logs",
+	)
+
+	cmd.PersistentFlags().String(
+		"log-overrides",
+		"",
+		"Logger config overrides. Format <name>,<key>=<val>,...;<name>,...",
 	)
 
 	cmd.PersistentFlags().String(
diff --git a/cli/server_dump.go b/cli/server_dump.go
index eb364a247f..767b86f364 100644
--- a/cli/server_dump.go
+++ b/cli/server_dump.go
@@ -24,7 +24,7 @@ func MakeServerDumpCmd() *cobra.Command {
 		Short: "Dumps the state of the entire database",
 		RunE: func(cmd *cobra.Command, _ []string) error {
 			cfg := mustGetContextConfig(cmd)
-			log.FeedbackInfo(cmd.Context(), "Dumping DB state...")
+			log.InfoContext(cmd.Context(), "Dumping DB state...")
 
 			if cfg.GetString("datastore.store") != configStoreBadger {
 				return errors.New("server-side dump is only supported for the Badger datastore")
diff --git a/cli/start.go b/cli/start.go
index d4e789cbc6..90ca08d77a 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -108,11 +108,11 @@ func MakeStartCommand() *cobra.Command {
 
 			defer func() {
 				if err := n.Close(cmd.Context()); err != nil {
-					log.FeedbackErrorE(cmd.Context(), "Stopping DefraDB", err)
+					log.ErrorContextE(cmd.Context(), "Stopping DefraDB", err)
 				}
 			}()
 
-			log.FeedbackInfo(cmd.Context(), "Starting DefraDB")
+			log.InfoContext(cmd.Context(), "Starting DefraDB")
 			if err := n.Start(cmd.Context()); err != nil {
 				return err
 			}
@@ -122,9 +122,9 @@ func MakeStartCommand() *cobra.Command {
 
 			select {
 			case <-cmd.Context().Done():
-				log.FeedbackInfo(cmd.Context(), "Received context cancellation; shutting down...")
+				log.InfoContext(cmd.Context(), "Received context cancellation; shutting down...")
 			case <-signalCh:
-				log.FeedbackInfo(cmd.Context(), "Received interrupt; shutting down...")
+				log.InfoContext(cmd.Context(), "Received interrupt; shutting down...")
 			}
 
 			return nil
diff --git a/datastore/blockstore.go b/datastore/blockstore.go
index 8525f8410e..be25894a3d 100644
--- a/datastore/blockstore.go
+++ b/datastore/blockstore.go
@@ -64,7 +64,6 @@ func (bs *bstore) HashOnRead(enabled bool) {
 
 // Get returns a block from the blockstore.
func (bs *bstore) Get(ctx context.Context, k cid.Cid) (blocks.Block, error) { if !k.Defined() { - log.Error(ctx, "Undefined CID in blockstore") return nil, ipld.ErrNotFound{Cid: k} } bdata, err := bs.store.Get(ctx, dshelp.MultihashToDsKey(k.Hash())) @@ -164,13 +163,13 @@ func (bs *bstore) AllKeysChan(ctx context.Context) (<-chan cid.Cid, error) { return } if e.Error != nil { - log.ErrorE(ctx, "Blockstore.AllKeysChan errored", e.Error) + log.ErrorContextE(ctx, "Blockstore.AllKeysChan errored", e.Error) return } hash, err := dshelp.DsKeyToMultihash(ds.RawKey(e.Key)) if err != nil { - log.ErrorE(ctx, "Error parsing key from binary", err) + log.ErrorContextE(ctx, "Error parsing key from binary", err) continue } k := cid.NewCidV1(cid.Raw, hash) diff --git a/datastore/store.go b/datastore/store.go index 759eef01db..7f2764a65d 100644 --- a/datastore/store.go +++ b/datastore/store.go @@ -14,12 +14,13 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" ds "github.com/ipfs/go-datastore" + "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/defradb/datastore/iterable" - "github.com/sourcenetwork/defradb/logging" ) var ( - log = logging.MustNewLogger("store") + log = corelog.NewLogger("store") ) // RootStore wraps Batching and TxnDatastore requiring datastore to support both batching and transactions. diff --git a/db/collection.go b/db/collection.go index 23ef06d9c4..566722fca9 100644 --- a/db/collection.go +++ b/db/collection.go @@ -1151,7 +1151,7 @@ func (c *collection) getAllDocIDsChan( go func() { defer func() { if err := q.Close(); err != nil { - log.ErrorE(ctx, errFailedtoCloseQueryReqAllIDs, err) + log.ErrorContextE(ctx, errFailedtoCloseQueryReqAllIDs, err) } close(resCh) c.discardImplicitTxn(ctx, txn) diff --git a/db/collection_delete.go b/db/collection_delete.go index 785b2830d7..371c454532 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -179,7 +179,7 @@ func (c *collection) deleteWithFilter( // If the plan isn't properly closed at any exit point log the error. defer func() { if err := selectionPlan.Close(); err != nil { - log.ErrorE(ctx, "Failed to close the request plan, after filter delete", err) + log.ErrorContextE(ctx, "Failed to close the request plan, after filter delete", err) } }() diff --git a/db/collection_update.go b/db/collection_update.go index fc985d2c41..7ddd868e47 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -240,7 +240,7 @@ func (c *collection) updateWithFilter( // If the plan isn't properly closed at any exit point log the error. 
defer func() { if err := selectionPlan.Close(); err != nil { - log.ErrorE(ctx, "Failed to close the selection plan, after filter update", err) + log.ErrorContextE(ctx, "Failed to close the selection plan, after filter update", err) } }() diff --git a/db/db.go b/db/db.go index 7b3ff7bcb8..30036fc55d 100644 --- a/db/db.go +++ b/db/db.go @@ -22,6 +22,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" @@ -30,12 +31,11 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/lens" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/request/graphql" ) var ( - log = logging.MustNewLogger("db") + log = corelog.NewLogger("db") ) // make sure we match our client interface @@ -109,7 +109,6 @@ func NewDB(ctx context.Context, rootstore datastore.RootStore, options ...Option } func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (*implicitTxnDB, error) { - log.Debug(ctx, "Loading: internal datastores") multistore := datastore.MultiStoreFrom(rootstore) parser, err := graphql.NewParser() @@ -197,7 +196,6 @@ func (db *db) initialize(ctx context.Context) error { } defer txn.Discard(ctx) - log.Debug(ctx, "Checking if DB has already been initialized...") exists, err := txn.Systemstore().Has(ctx, ds.NewKey("init")) if err != nil && !errors.Is(err, ds.ErrNotFound) { return err @@ -205,7 +203,6 @@ func (db *db) initialize(ctx context.Context) error { // if we're loading an existing database, just load the schema // and migrations and finish initialization if exists { - log.Debug(ctx, "DB has already been initialized, continuing") err = db.loadSchema(ctx, txn) if err != nil { return err @@ -222,8 +219,6 @@ func (db *db) initialize(ctx context.Context) error { return txn.Commit(ctx) } - log.Debug(ctx, "Opened a new DB, needs full initialization") - // init meta data // collection sequence _, err = db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) @@ -261,16 +256,16 @@ func (db *db) PrintDump(ctx context.Context) error { // Close is called when we are shutting down the database. // This is the place for any last minute cleanup or releasing of resources (i.e.: Badger instance). 
 func (db *db) Close() {
-	log.Info(context.Background(), "Closing DefraDB process...")
+	log.Info("Closing DefraDB process...")
 	if db.events.Updates.HasValue() {
 		db.events.Updates.Value().Close()
 	}
 
 	err := db.rootstore.Close()
 	if err != nil {
-		log.ErrorE(context.Background(), "Failure closing running process", err)
+		log.ErrorE("Failure closing running process", err)
 	}
-	log.Info(context.Background(), "Successfully closed running process")
+	log.Info("Successfully closed running process")
 }
 
 func printStore(ctx context.Context, store datastore.DSReaderWriter) error {
@@ -286,7 +281,7 @@ func printStore(ctx context.Context, store datastore.DSReaderWriter) error {
 	}
 
 	for r := range results.Next() {
-		log.Info(ctx, "", logging.NewKV(r.Key, r.Value))
+		log.InfoContext(ctx, "", corelog.Any(r.Key, r.Value))
 	}
 
 	return results.Close()
diff --git a/db/subscriptions.go b/db/subscriptions.go
index 2e7d2d4123..bc013ae587 100644
--- a/db/subscriptions.go
+++ b/db/subscriptions.go
@@ -55,7 +55,7 @@ func (db *db) handleSubscription(
 	for evt := range pub.Event() {
 		txn, err := db.NewTxn(ctx, false)
 		if err != nil {
-			log.Error(ctx, err.Error())
+			log.ErrorContext(ctx, err.Error())
 			continue
 		}
diff --git a/docs/config.md b/docs/config.md
index 5f8985f71c..80c6d437ec 100644
--- a/docs/config.md
+++ b/docs/config.md
@@ -63,4 +63,28 @@ https://docs.libp2p.io/concepts/addressing/
 
 Enable libp2p's Circuit relay transport protocol. Defaults to `false`.
 
-https://docs.libp2p.io/concepts/circuit-relay/
\ No newline at end of file
+https://docs.libp2p.io/concepts/circuit-relay/
+
+## `log.level`
+
+Log level to use. Options are `info` or `error`. Defaults to `info`.
+
+## `log.output`
+
+Log output path. Options are `stderr` or `stdout`. Defaults to `stderr`.
+
+## `log.format`
+
+Log format to use. Options are `text` or `json`. Defaults to `text`.
+
+## `log.stacktrace`
+
+Include stacktrace in error and fatal logs. Defaults to `false`.
+
+## `log.source`
+
+Include source location in logs. Defaults to `false`.
+
+## `log.overrides`
+
+Logger config overrides. Format `<name>,<key>=<val>,...;<name>,...`.
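Taken together, the call-site changes above amount to the following migration pattern. This is a reader's sketch distilled from the diffs in this patch, not code added by it; the logger name and field values are illustrative:

```go
package main

import (
	"context"
	"errors"
	"time"

	"github.com/sourcenetwork/corelog"
)

// was: log = logging.MustNewLogger("example")
var log = corelog.NewLogger("example")

func report(ctx context.Context, elapsed time.Duration) {
	// Context-free logging keeps the old method names:
	// was: log.Info(context.Background(), "Closing DefraDB process...")
	log.Info("Closing DefraDB process...")

	// Context-aware calls move to the *Context variants, and typed
	// attributes replace logging.NewKV:
	// was: log.Info(ctx, "Request", logging.NewKV("ElapsedTime", elapsed.String()))
	log.InfoContext(ctx, "Request",
		corelog.String("Method", "GET"),
		corelog.Duration("ElapsedTime", elapsed),
	)

	// was: log.ErrorE(ctx, "Blockstore.AllKeysChan errored", err)
	log.ErrorContextE(ctx, "Blockstore.AllKeysChan errored", errors.New("example"))
}
```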
diff --git a/go.mod b/go.mod index a35f170cb3..eff296f3e3 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/sourcenetwork/defradb -go 1.21 +go 1.21.3 require ( github.com/bits-and-blooms/bitset v1.13.0 @@ -18,7 +18,6 @@ require ( github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 github.com/ipfs/go-ipld-format v0.6.0 - github.com/ipfs/go-log v1.0.5 github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c @@ -31,6 +30,7 @@ require ( github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 + github.com/sourcenetwork/corelog v0.0.6 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd github.com/sourcenetwork/immutable v0.3.0 @@ -99,6 +99,7 @@ require ( github.com/ipfs/go-ipfs-pq v0.0.3 // indirect github.com/ipfs/go-ipfs-util v0.0.3 // indirect github.com/ipfs/go-ipld-legacy v0.2.1 // indirect + github.com/ipfs/go-log v1.0.5 // indirect github.com/ipfs/go-metrics-interface v0.0.1 // indirect github.com/ipfs/go-peertaskqueue v0.8.1 // indirect github.com/ipfs/kubo v0.25.0 // indirect diff --git a/go.sum b/go.sum index 9d119e9967..22c2ef750c 100644 --- a/go.sum +++ b/go.sum @@ -510,6 +510,8 @@ github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIK github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 h1:TpQDDPfucDgCNH0NVqVUk6SSq6T6G8p9HIocmwZh9Tg= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276/go.mod h1:lxiZTDBw0vheFMqSwX2OvB6RTDI1+/UtVCSU4rpThFM= +github.com/sourcenetwork/corelog v0.0.6 h1:3q3Kh1G0C4iHimkPrOpAZOKIKESIv4zZ51vKuY8pANA= +github.com/sourcenetwork/corelog v0.0.6/go.mod h1:mXsBA4ujUt0lAkDFoHoXuaIQjhdeXi+RfDNV7ZTiy5E= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 h1:d/PeGZutd5NcDr6ltAv8ubN5PxsHMp1YUnhHY/QCWB4= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd h1:lmpW39/8wPJ0khWRhOcj7Bj0HYKbSmQ8rXMJw1cMB8U= diff --git a/http/logger.go b/http/logger.go index d23f65e94a..c4e715f695 100644 --- a/http/logger.go +++ b/http/logger.go @@ -15,11 +15,10 @@ import ( "time" "github.com/go-chi/chi/v5/middleware" - - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) -var log = logging.MustNewLogger("http") +var log = corelog.NewLogger("http") type logEntry struct { req *http.Request @@ -28,14 +27,14 @@ type logEntry struct { var _ middleware.LogEntry = (*logEntry)(nil) func (e *logEntry) Write(status, bytes int, header http.Header, elapsed time.Duration, extra any) { - log.Info( + log.InfoContext( e.req.Context(), "Request", - logging.NewKV("Method", e.req.Method), - logging.NewKV("Path", e.req.URL.Path), - logging.NewKV("Status", status), - logging.NewKV("LengthBytes", bytes), - logging.NewKV("ElapsedTime", elapsed.String()), + corelog.String("Method", e.req.Method), + corelog.String("Path", e.req.URL.Path), + corelog.Int("Status", status), + corelog.Int("LengthBytes", bytes), + corelog.Duration("ElapsedTime", elapsed), ) } diff --git a/logging/config.go b/logging/config.go deleted file mode 100644 index 63cde2ceb5..0000000000 --- a/logging/config.go 
+++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "context" - "io" - "os" -) - -type ( - // EncoderFormat is the format of the log output (JSON, CSV, ...). - EncoderFormat = int8 - EncoderFormatOption struct { - EncoderFormat EncoderFormat - HasValue bool - } -) - -// NewEncoderFormatOption creates a new EncoderFormatOption with the given value. -func NewEncoderFormatOption(v EncoderFormat) EncoderFormatOption { - return EncoderFormatOption{ - EncoderFormat: v, - HasValue: true, - } -} - -const ( - stderr = "stderr" - stdout = "stdout" - - JSON EncoderFormat = iota - CSV -) - -type ( - LogLevel = int8 - LogLevelOption struct { - LogLevel LogLevel - HasValue bool - } -) - -func NewLogLevelOption(v LogLevel) LogLevelOption { - return LogLevelOption{ - LogLevel: v, - HasValue: true, - } -} - -// Log levels. -const ( - Debug LogLevel = -1 - Info LogLevel = 0 - Warn LogLevel = 1 - Error LogLevel = 2 - Fatal LogLevel = 5 -) - -type EnableStackTraceOption struct { - EnableStackTrace bool - HasValue bool -} - -type EnableCallerOption struct { - EnableCaller bool - HasValue bool -} - -type DisableColorOption struct { - DisableColor bool - HasValue bool -} - -func NewEnableStackTraceOption(enable bool) EnableStackTraceOption { - return EnableStackTraceOption{ - EnableStackTrace: enable, - HasValue: true, - } -} - -func NewEnableCallerOption(enable bool) EnableCallerOption { - return EnableCallerOption{ - EnableCaller: enable, - HasValue: true, - } -} - -func NewDisableColorOption(disable bool) DisableColorOption { - return DisableColorOption{ - DisableColor: disable, - HasValue: true, - } -} - -type Config struct { - Level LogLevelOption - EncoderFormat EncoderFormatOption - EnableStackTrace EnableStackTraceOption - EnableCaller EnableCallerOption - DisableColor DisableColorOption - OutputPaths []string - OverridesByLoggerName map[string]Config - - Pipe io.Writer // this is used for testing purposes only -} - -func (c Config) forLogger(name string) Config { - loggerConfig := Config{ - Level: c.Level, - EnableStackTrace: c.EnableStackTrace, - DisableColor: c.DisableColor, - EnableCaller: c.EnableCaller, - EncoderFormat: c.EncoderFormat, - OutputPaths: c.OutputPaths, - Pipe: c.Pipe, - } - - if override, hasOverride := c.OverridesByLoggerName[name]; hasOverride { - if override.Level.HasValue { - loggerConfig.Level = override.Level - } - if override.EnableStackTrace.HasValue { - loggerConfig.EnableStackTrace = override.EnableStackTrace - } - if override.EnableCaller.HasValue { - loggerConfig.EnableCaller = override.EnableCaller - } - if override.DisableColor.HasValue { - loggerConfig.DisableColor = override.DisableColor - } - if override.EncoderFormat.HasValue { - loggerConfig.EncoderFormat = override.EncoderFormat - } - if len(override.OutputPaths) != 0 { - loggerConfig.OutputPaths = override.OutputPaths - } - if override.Pipe != nil { - loggerConfig.Pipe = override.Pipe - } - } - - return loggerConfig -} - -func (c Config) copy() Config { - overridesByLoggerName := make(map[string]Config, len(c.OverridesByLoggerName)) - for k, o := range c.OverridesByLoggerName { - overridesByLoggerName[k] = Config{ - 
Level:            o.Level,
-			EnableStackTrace: o.EnableStackTrace,
-			EncoderFormat:    o.EncoderFormat,
-			EnableCaller:     o.EnableCaller,
-			DisableColor:     o.DisableColor,
-			OutputPaths:      o.OutputPaths,
-			Pipe:             o.Pipe,
-		}
-	}
-
-	return Config{
-		Level:                 c.Level,
-		EnableStackTrace:      c.EnableStackTrace,
-		EncoderFormat:         c.EncoderFormat,
-		OutputPaths:           c.OutputPaths,
-		EnableCaller:          c.EnableCaller,
-		DisableColor:          c.DisableColor,
-		OverridesByLoggerName: overridesByLoggerName,
-		Pipe:                  c.Pipe,
-	}
-}
-
-// Create a new Config from the given config options. Each provided option overrides the corresponding field.
-func (oldConfig Config) with(newConfigOptions Config) Config {
-	newConfig := oldConfig.copy()
-
-	if newConfigOptions.Level.HasValue {
-		newConfig.Level = newConfigOptions.Level
-	}
-
-	if newConfigOptions.EnableStackTrace.HasValue {
-		newConfig.EnableStackTrace = newConfigOptions.EnableStackTrace
-	}
-
-	if newConfigOptions.EnableCaller.HasValue {
-		newConfig.EnableCaller = newConfigOptions.EnableCaller
-	}
-
-	if newConfigOptions.DisableColor.HasValue {
-		newConfig.DisableColor = newConfigOptions.DisableColor
-	}
-
-	if newConfigOptions.EncoderFormat.HasValue {
-		newConfig.EncoderFormat = newConfigOptions.EncoderFormat
-	}
-
-	if len(newConfigOptions.OutputPaths) != 0 {
-		newConfig.OutputPaths = validatePaths(newConfigOptions.OutputPaths)
-	}
-
-	if newConfigOptions.Pipe != nil {
-		newConfig.Pipe = newConfigOptions.Pipe
-	}
-
-	for k, o := range newConfigOptions.OverridesByLoggerName {
-		// We fully overwrite overrides to allow for ease of
-		// reset/removal (can provide empty to return to default)
-		newConfig.OverridesByLoggerName[k] = Config{
-			Level:            o.Level,
-			EnableStackTrace: o.EnableStackTrace,
-			EnableCaller:     o.EnableCaller,
-			DisableColor:     o.DisableColor,
-			EncoderFormat:    o.EncoderFormat,
-			OutputPaths:      validatePaths(o.OutputPaths),
-			Pipe:             o.Pipe,
-		}
-	}
-
-	return newConfig
-}
-
-// validatePaths ensures that all output paths are valid, to avoid zap sync errors
-// and to ensure that the logs are not lost.
-func validatePaths(paths []string) []string {
-	validatedPaths := make([]string, 0, len(paths))
-	for _, p := range paths {
-		if p == stderr || p == stdout {
-			validatedPaths = append(validatedPaths, p)
-			continue
-		}
-
-		if f, err := os.OpenFile(p, os.O_CREATE|os.O_APPEND, 0644); err != nil {
-			log.Info(context.Background(), "cannot use provided path", NewKV("err", err))
-		} else {
-			err := f.Close()
-			if err != nil {
-				log.Info(context.Background(), "problem closing file", NewKV("err", err))
-			}
-
-			validatedPaths = append(validatedPaths, p)
-		}
-	}
-
-	return validatedPaths
-}
-
-func willOutputToStderrOrStdout(paths []string) bool {
-	if len(paths) == 0 {
-		return true
-	}
-	for _, p := range paths {
-		if p == stderr || p == stdout {
-			return true
-		}
-	}
-	return false
-}
diff --git a/logging/doc.go b/logging/doc.go
deleted file mode 100644
index 2f6a0b8827..0000000000
--- a/logging/doc.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-/*
-Package logging abstracts away any underlying logging library providing
-a single contact-point for the dependency allowing relatively easy
-swapping out should we want to.
- -This package allows configuration to be loaded and globally applied -after logger instances have been created, utilising an internal thread-safe -registry of named logger instances to apply the config to. - -Configuration may be applied globally, or to logger instances of a specific -name, with the named-configuration being used over the global settings if -both are provided. - -All configuration options are optional. -*/ -package logging diff --git a/logging/logger.go b/logging/logger.go deleted file mode 100644 index f93e305fce..0000000000 --- a/logging/logger.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "context" - "fmt" - stdlog "log" - "os" - "sync" - - golog "github.com/ipfs/go-log" - gologV2 "github.com/ipfs/go-log/v2" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" -) - -type logger struct { - name string - logger *zap.Logger - consoleLogger *stdlog.Logger - syncLock sync.RWMutex -} - -var _ Logger = (*logger)(nil) - -func mustNewLogger(name string) *logger { - l, err := buildZapLogger(name, Config{}) - if err != nil { - panic(err) - } - - return &logger{ - name: name, - logger: l, - } -} - -func (l *logger) Debug(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Debug(message, toZapFields(keyvals)...) -} - -func (l *logger) Info(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Info(message, toZapFields(keyvals)...) -} - -func (l *logger) Error(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Error(message, toZapFields(keyvals)...) -} - -func (l *logger) ErrorE(ctx context.Context, message string, err error, keyvals ...KV) { - kvs := keyvals - kvs = append(kvs, NewKV("Error", err.Error())) - kvs = withStackTrace(err, kvs) - - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Error(message, toZapFields(kvs)...) -} - -func (l *logger) Fatal(ctx context.Context, message string, keyvals ...KV) { - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Fatal(message, toZapFields(keyvals)...) -} - -func (l *logger) FatalE(ctx context.Context, message string, err error, keyvals ...KV) { - kvs := keyvals - kvs = append(kvs, NewKV("Error", err.Error())) - kvs = withStackTrace(err, kvs) - - l.syncLock.RLock() - defer l.syncLock.RUnlock() - - l.logger.Fatal(message, toZapFields(kvs)...) -} - -func (l *logger) FeedbackInfo(ctx context.Context, message string, keyvals ...KV) { - l.Info(ctx, message, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - } -} - -func (l *logger) FeedbackError(ctx context.Context, message string, keyvals ...KV) { - l.Error(ctx, message, keyvals...) - l.syncLock.RLock() - defer l.syncLock.RUnlock() - if l.consoleLogger != nil { - l.consoleLogger.Println(message) - } -} - -func (l *logger) FeedbackErrorE(ctx context.Context, message string, err error, keyvals ...KV) { - l.ErrorE(ctx, message, err, keyvals...) 
-	l.syncLock.RLock()
-	defer l.syncLock.RUnlock()
-	if l.consoleLogger != nil {
-		l.consoleLogger.Println(message)
-		if stack, hasStack := getStackTrace(err); hasStack {
-			l.consoleLogger.Println(stack)
-		}
-	}
-}
-
-func (l *logger) FeedbackFatal(ctx context.Context, message string, keyvals ...KV) {
-	l.Fatal(ctx, message, keyvals...)
-	l.syncLock.RLock()
-	defer l.syncLock.RUnlock()
-	if l.consoleLogger != nil {
-		l.consoleLogger.Println(message)
-	}
-}
-
-func (l *logger) FeedbackFatalE(ctx context.Context, message string, err error, keyvals ...KV) {
-	l.FatalE(ctx, message, err, keyvals...)
-	l.syncLock.RLock()
-	defer l.syncLock.RUnlock()
-	if l.consoleLogger != nil {
-		l.consoleLogger.Println(message)
-		if stack, hasStack := getStackTrace(err); hasStack {
-			l.consoleLogger.Println(stack)
-		}
-	}
-}
-
-func (l *logger) Flush() error {
-	return l.logger.Sync()
-}
-
-func toZapFields(keyvals []KV) []zap.Field {
-	result := make([]zap.Field, len(keyvals))
-	for i, kv := range keyvals {
-		result[i] = zap.Any(kv.key, kv.value)
-	}
-	return result
-}
-
-func (l *logger) ApplyConfig(config Config) {
-	newLogger, err := buildZapLogger(l.name, config)
-	if err != nil {
-		l.logger.Error("Error applying config to logger", zap.Error(err))
-		return
-	}
-
-	l.syncLock.Lock()
-	defer l.syncLock.Unlock()
-
-	// We need to sync the old log before swapping it out
-	_ = l.logger.Sync()
-	l.logger = newLogger
-
-	if !willOutputToStderrOrStdout(config.OutputPaths) {
-		if config.Pipe != nil { // for testing purposes only
-			l.consoleLogger = stdlog.New(config.Pipe, "", 0)
-		} else {
-			l.consoleLogger = stdlog.New(os.Stderr, "", 0)
-		}
-	} else {
-		l.consoleLogger = nil
-	}
-}
-
-func withStackTrace(err error, keyvals []KV) []KV {
-	if stack, hasStack := getStackTrace(err); hasStack {
-		return append(keyvals, NewKV("stacktrace", stack))
-	}
-
-	return keyvals
-}
-
-func getStackTrace(err error) (string, bool) {
-	configMutex.RLock()
-	defer configMutex.RUnlock()
-
-	if cachedConfig.EnableStackTrace.EnableStackTrace {
-		return fmt.Sprintf("%+v", err), true
-	}
-
-	return "", false
-}
-
-func buildZapLogger(name string, config Config) (*zap.Logger, error) {
-	const (
-		encodingTypeConsole string = "console"
-		encodingTypeJSON    string = "json"
-	)
-	defaultConfig := zap.NewProductionConfig()
-	defaultConfig.Encoding = encodingTypeConsole
-	defaultConfig.EncoderConfig.ConsoleSeparator = ", "
-	defaultConfig.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
-	defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
-	defaultConfig.DisableStacktrace = true
-	defaultConfig.DisableCaller = true
-
-	if config.Level.HasValue {
-		defaultConfig.Level = zap.NewAtomicLevelAt(zapcore.Level(config.Level.LogLevel))
-	}
-
-	if config.DisableColor.HasValue && config.DisableColor.DisableColor {
-		defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
-	}
-
-	if config.EnableCaller.HasValue {
-		defaultConfig.DisableCaller = !config.EnableCaller.EnableCaller
-	}
-
-	if config.EncoderFormat.HasValue {
-		if config.EncoderFormat.EncoderFormat == JSON {
-			defaultConfig.Encoding = encodingTypeJSON
-			defaultConfig.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
-		} else if config.EncoderFormat.EncoderFormat == CSV {
-			defaultConfig.Encoding = encodingTypeConsole
-		}
-	}
-
-	if len(config.OutputPaths) != 0 {
-		defaultConfig.OutputPaths = config.OutputPaths[:]
-	}
-
-	// We must skip the first caller, as this will always be our wrapper
-	newLogger, err := defaultConfig.Build(zap.AddCallerSkip(1))
-	if err != nil {
-		return nil, err
-	}
-
-	if willOutputToStderrOrStdout(defaultConfig.OutputPaths) && config.Pipe != nil {
-		newLogger = newLogger.WithOptions(zap.WrapCore(func(zapcore.Core) zapcore.Core {
-			cfg := zap.NewProductionEncoderConfig()
-			cfg.ConsoleSeparator = defaultConfig.EncoderConfig.ConsoleSeparator
-			cfg.EncodeTime = defaultConfig.EncoderConfig.EncodeTime
-			cfg.EncodeLevel = defaultConfig.EncoderConfig.EncodeLevel
-			return zapcore.NewCore(
-				zapcore.NewJSONEncoder(cfg),
-				zapcore.Lock(zapcore.AddSync(config.Pipe)),
-				zap.NewAtomicLevelAt(zapcore.Level(config.Level.LogLevel)),
-			)
-		}))
-	}
-
-	return newLogger.Named(name), nil
-}
-
-/*
-	The following are wrappers for external packages' loggers that are compatible with
-	our own logger (i.e. Zap based). They offer a way to access the internal logger stores
-	and apply our configuration. They should implement ApplyConfig.
-*/
-
-// goLogger is a wrapper for a go-log logger
-// Used by github.com/ipfs/go-ipfs-provider
-type goLogger struct {
-	*logger
-	*golog.ZapEventLogger
-}
-
-func GetGoLogger(name string) *goLogger {
-	l := mustNewLogger(name)
-	gl := golog.Logger(name)
-	return &goLogger{
-		logger:         l,
-		ZapEventLogger: gl,
-	}
-}
-
-func (l *goLogger) ApplyConfig(config Config) {
-	l.logger.ApplyConfig(config)
-	l.ZapEventLogger.SugaredLogger = *l.logger.logger.Sugar()
-}
-
-// goLoggerV2 is a wrapper for a go-log V2 logger
-// Used by github.com/sourcenetwork/defradb/datastore/badger/v4
-type goLoggerV2 struct {
-	*logger
-	*gologV2.ZapEventLogger
-}
-
-func GetGoLoggerV2(name string) *goLoggerV2 {
-	l := mustNewLogger(name)
-	gl := gologV2.Logger(name)
-	return &goLoggerV2{
-		logger:         l,
-		ZapEventLogger: gl,
-	}
-}
-
-func (l *goLoggerV2) ApplyConfig(config Config) {
-	l.logger.ApplyConfig(config)
-	l.ZapEventLogger.SugaredLogger = *l.logger.logger.Sugar()
-}
diff --git a/logging/logging.go b/logging/logging.go
deleted file mode 100644
index 1f1883bedb..0000000000
--- a/logging/logging.go
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-package logging
-
-import (
-	"context"
-)
-
-var log = MustNewLogger("logging")
-
-// KV is a key-value pair used to pass structured data to loggers.
-type KV struct {
-	key   string
-	value any
-}
-
-// NewKV creates a new KV key-value pair.
-func NewKV(key string, value any) KV {
-	return KV{
-		key:   key,
-		value: value,
-	}
-}
-
-type Logger interface {
-	// Debug logs a message at debug log level. Key-value pairs can be added.
-	Debug(ctx context.Context, message string, keyvals ...KV)
-	// Info logs a message at info log level. Key-value pairs can be added.
-	Info(ctx context.Context, message string, keyvals ...KV)
-	// Error logs a message at error log level. Key-value pairs can be added.
-	Error(ctx context.Context, message string, keyvals ...KV)
-	// ErrorE logs a message and an error at error log level. Key-value pairs can be added.
-	ErrorE(ctx context.Context, message string, err error, keyvals ...KV)
-	// Fatal logs a message at fatal log level. Key-value pairs can be added.
-	Fatal(ctx context.Context, message string, keyvals ...KV)
-	// FatalE logs a message and an error at fatal log level.
Key-value pairs can be added.
-	FatalE(ctx context.Context, message string, err error, keyvals ...KV)
-
-	// Feedback-prefixed methods ensure that messages reach a user in case the logs are sent to a file.
-
-	// FeedbackInfo calls Info and sends the message to stderr if logs are sent to a file.
-	FeedbackInfo(ctx context.Context, message string, keyvals ...KV)
-	// FeedbackError calls Error and sends the message to stderr if logs are sent to a file.
-	FeedbackError(ctx context.Context, message string, keyvals ...KV)
-	// FeedbackErrorE calls ErrorE and sends the message to stderr if logs are sent to a file.
-	FeedbackErrorE(ctx context.Context, message string, err error, keyvals ...KV)
-	// FeedbackFatal calls Fatal and sends the message to stderr if logs are sent to a file.
-	FeedbackFatal(ctx context.Context, message string, keyvals ...KV)
-	// FeedbackFatalE calls FatalE and sends the message to stderr if logs are sent to a file.
-	FeedbackFatalE(ctx context.Context, message string, err error, keyvals ...KV)
-
-	// Flush flushes any buffered log entries.
-	Flush() error
-	// ApplyConfig updates the logger with a new config.
-	ApplyConfig(config Config)
-}
-
-// MustNewLogger creates and registers a new logger with the given name, and panics if there is an error.
-func MustNewLogger(name string) Logger {
-	logger := mustNewLogger(name)
-	register(name, logger)
-	return logger
-}
-
-// SetConfig updates all registered loggers with the given config.
-func SetConfig(newConfig Config) {
-	updatedConfig := setConfig(newConfig)
-	updateLoggers(updatedConfig)
-}
diff --git a/logging/logging_test.go b/logging/logging_test.go
deleted file mode 100644
index 5a19cfb744..0000000000
--- a/logging/logging_test.go
+++ /dev/null
@@ -1,1011 +0,0 @@
-// Copyright 2022 Democratized Data Foundation
-//
-// Use of this software is governed by the Business Source License
-// included in the file licenses/BSL.txt.
-//
-// As of the Change Date specified in that file, in accordance with
-// the Business Source License, use of this software will be governed
-// by the Apache License, Version 2.0, included in the file
-// licenses/APL.txt.
-
-// todo: The logger(s) appear to leak resources and do not close down promptly on Windows;
-// the log files have open readers when the Golang test runner attempts to delete them.
-// See https://github.com/sourcenetwork/defradb/issues/2057 for more info.
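
For context on the deletion above: the removed package was driven in two steps. Loggers were created up front with MustNewLogger, and configuration was applied afterwards through SetConfig, with per-name overrides taking precedence over the global settings; the registry deleted further below exists to support exactly this late application of config. A minimal usage sketch assembled from the deleted API (the logger name, level choices, and key-value pair are illustrative, not defradb defaults):

package main

import (
	"context"

	"github.com/sourcenetwork/defradb/logging"
)

func main() {
	// Loggers register themselves by name at creation time, before any
	// configuration exists.
	log := logging.MustNewLogger("mycomponent")

	// Config is applied globally after the fact; a named override wins
	// over the global settings for that logger only.
	logging.SetConfig(logging.Config{
		Level:         logging.NewLogLevelOption(logging.Info),
		EncoderFormat: logging.NewEncoderFormatOption(logging.JSON),
		OverridesByLoggerName: map[string]logging.Config{
			"mycomponent": {Level: logging.NewLogLevelOption(logging.Debug)},
		},
	})

	// Structured data travels as untyped key-value pairs.
	log.Info(context.Background(), "started", logging.NewKV("port", 9181))
}
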
- -//go:build !windows - -package logging - -import ( - "bufio" - "bytes" - "context" - "encoding/json" - "io" - "os" - "os/exec" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/errors" -) - -func TestLogWritesFatalMessageToLogAndKillsProcess(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - }) - - logger.Fatal(ctx, logMessage) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalMessageToLogAndKillsProcess") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") - // stacktrace is disabled by default - assert.NotContains(t, logLines[0], "stacktrace") -} - -func TestLogWritesFatalMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - c.EnableStackTrace = NewEnableStackTraceOption(true) - }) - - logger.Fatal(ctx, logMessage) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // no stacktrace will be present since no error was sent to the logger. 
- assert.NotContains(t, logLines[0], "stacktrace") - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesFatalEMessageToLogAndKillsProcess(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - }) - - logger.FatalE(ctx, logMessage, errors.New("dummy error")) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalEMessageToLogAndKillsProcess") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") - // stacktrace is disabled by default - assert.NotContains(t, logLines[0], "stacktrace") -} - -func TestLogWritesFatalEMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled(t *testing.T) { - logMessage := "test log message" - - if os.Getenv("OS_EXIT") == "1" { - ctx := context.Background() - logPath := os.Getenv("LOG_PATH") - logger, logPath := getLogger(t, func(c *Config) { - c.OutputPaths = []string{logPath} - c.EnableStackTrace = NewEnableStackTraceOption(true) - }) - - logger.FatalE(ctx, logMessage, errors.New("dummy error")) - return - } - - dir := t.TempDir() - logPath := dir + "/log.txt" - cmd := exec.Command(os.Args[0], "-test.run=TestLogWritesFatalEMessageWithStackTraceToLogAndKillsProcessGivenStackTraceEnabled") - cmd.Env = append(os.Environ(), "OS_EXIT=1", "LOG_PATH="+logPath) - err := cmd.Run() - if e, ok := err.(*exec.ExitError); !ok || e.Success() { - t.Fatalf("Logger.Fatal failed to kill the process, error: %v", err) - } - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "FATAL", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - assert.Contains(t, logLines[0], "stacktrace") - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -type LogLevelTestCase struct { - LogLevel LogLevel - LogFunc func(Logger, context.Context, string) - ExpectedLogLevel string - WithStackTrace bool - ExpectStackTrace bool - WithCaller bool -} - -func logDebug(l Logger, c context.Context, m string) { l.Debug(c, m) } -func logInfo(l Logger, c context.Context, m string) { l.Info(c, m) } -func logError(l Logger, c context.Context, m string) { l.Error(c, m) } -func logErrorE(l Logger, c context.Context, m string) { l.ErrorE(c, m, errors.New("test error")) } - -func getLogLevelTestCase() []LogLevelTestCase { - return []LogLevelTestCase{ - {Debug, logDebug, "DEBUG", false, false, true}, - {Debug, logDebug, "DEBUG", false, false, false}, - {Debug, logInfo, "INFO", false, false, false}, - {Debug, logError, "ERROR", false, 
false, false}, - {Debug, logError, "ERROR", true, false, false}, - {Debug, logErrorE, "ERROR", false, false, false}, - {Debug, logErrorE, "ERROR", true, true, false}, - {Info, logDebug, "", false, false, false}, - {Info, logInfo, "INFO", false, false, true}, - {Info, logInfo, "INFO", false, false, false}, - {Info, logError, "ERROR", false, false, false}, - {Info, logError, "ERROR", true, false, false}, - {Info, logErrorE, "ERROR", false, false, false}, - {Info, logErrorE, "ERROR", true, true, false}, - {Warn, logDebug, "", false, false, false}, - {Warn, logInfo, "", false, false, false}, - {Warn, logError, "ERROR", false, false, false}, - {Warn, logError, "ERROR", true, false, false}, - {Warn, logErrorE, "ERROR", false, false, false}, - {Warn, logErrorE, "ERROR", true, true, false}, - {Error, logDebug, "", false, false, false}, - {Error, logInfo, "", false, false, false}, - {Error, logError, "ERROR", false, false, true}, - {Error, logError, "ERROR", false, false, false}, - {Error, logError, "ERROR", true, false, false}, - {Error, logErrorE, "ERROR", false, false, false}, - {Error, logErrorE, "ERROR", true, true, false}, - {Fatal, logDebug, "", false, false, true}, - {Fatal, logDebug, "", false, false, false}, - {Fatal, logInfo, "", false, false, false}, - {Fatal, logError, "", false, false, false}, - {Fatal, logErrorE, "", false, false, false}, - } -} - -func TestLogWritesMessagesToLog(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.EnableStackTrace = NewEnableStackTraceOption(tc.WithStackTrace) - c.EnableCaller = NewEnableCallerOption(tc.WithCaller) - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - Level: NewLogLevelOption(tc.LogLevel), - EnableStackTrace: NewEnableStackTraceOption(tc.WithStackTrace), - EnableCaller: NewEnableCallerOption(tc.WithCaller), - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, 
hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedContextLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - Level: NewLogLevelOption(Error), - }) - SetConfig(Config{ - Level: NewLogLevelOption(tc.LogLevel), - EnableStackTrace: NewEnableStackTraceOption(tc.WithStackTrace), - EnableCaller: NewEnableCallerOption(tc.WithCaller), - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenNoLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenNotFoundLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{"/path/not/found"} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogDoesntWriteMessagesToLogGivenStderrLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range 
getLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{stderr} - c.Pipe = b - }) - - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenUpdatedLogPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for _, tc := range getLogLevelTestCase() { - ctx := context.Background() - logger, _ := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.OutputPaths = []string{} - }) - - dir := t.TempDir() - logPath := dir + "/log.txt" - SetConfig(Config{ - OutputPaths: []string{logPath}, - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - } - - clearRegistry("TestLogName") - } -} - -func logFeedbackInfo(l Logger, c context.Context, m string) { l.FeedbackInfo(c, m) } -func logFeedbackError(l Logger, c context.Context, m string) { l.FeedbackError(c, m) } -func logFeedbackErrorE(l Logger, c context.Context, m string) { - l.FeedbackErrorE(c, m, errors.New("test error")) -} - -func getFeedbackLogLevelTestCase() []LogLevelTestCase { - return []LogLevelTestCase{ - {Debug, logFeedbackInfo, "INFO", false, false, false}, - {Debug, logFeedbackError, "ERROR", false, false, false}, - {Debug, logFeedbackError, "ERROR", true, false, false}, - {Debug, logFeedbackErrorE, "ERROR", false, false, false}, - {Debug, logFeedbackErrorE, "ERROR", true, true, false}, - {Info, logFeedbackInfo, "INFO", false, false, true}, - {Info, logFeedbackInfo, "INFO", false, false, false}, - {Info, logFeedbackError, "ERROR", false, false, false}, - {Info, logFeedbackError, "ERROR", true, false, false}, - {Info, logFeedbackErrorE, "ERROR", false, false, false}, - {Info, logFeedbackErrorE, "ERROR", true, true, false}, - {Warn, logFeedbackInfo, "", false, false, false}, - {Warn, logFeedbackError, "ERROR", false, false, false}, - {Warn, logFeedbackError, "ERROR", true, false, false}, - {Warn, logFeedbackErrorE, "ERROR", false, false, false}, - {Warn, logFeedbackErrorE, "ERROR", true, true, false}, - {Error, logFeedbackInfo, "", false, false, false}, - {Error, logFeedbackError, "ERROR", false, false, true}, - {Error, logFeedbackError, "ERROR", false, false, false}, - {Error, logFeedbackError, "ERROR", true, false, false}, - {Error, logFeedbackErrorE, "ERROR", false, false, false}, - {Error, logFeedbackErrorE, "ERROR", true, true, false}, - {Fatal, logFeedbackInfo, "", false, false, false}, - {Fatal, logFeedbackError, "", false, false, false}, - {Fatal, 
logFeedbackErrorE, "", false, false, false}, - } -} - -func TestLogWritesMessagesToFeedbackLog(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - for i, tc := range getFeedbackLogLevelTestCase() { - ctx := context.Background() - b := &bytes.Buffer{} - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(tc.LogLevel) - c.EnableStackTrace = NewEnableStackTraceOption(tc.WithStackTrace) - c.EnableCaller = NewEnableCallerOption(tc.WithCaller) - c.Pipe = b - }) - logMessage := "test log message" - - tc.LogFunc(logger, ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if tc.ExpectedLogLevel == "" { - assert.Len(t, logLines, 0) - } else { - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines for tc %d", len(logLines), i) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, tc.ExpectedLogLevel, logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - _, hasStackTrace := logLines[0]["stacktrace"] - assert.Equal(t, tc.ExpectStackTrace, hasStackTrace) - _, hasCaller := logLines[0]["caller"] - assert.Equal(t, tc.WithCaller, hasCaller) - } - - if tc.ExpectStackTrace { - assert.Contains(t, b.String(), logMessage+"\ntest error. Stack:") - } else { - assert.Equal(t, logMessage+"\n", b.String()) - } - - clearRegistry("TestLogName") - } -} - -func TestLogWritesMessagesToLogGivenPipeWithValidPath(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - b := &bytes.Buffer{} - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.Pipe = b - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideForAnotherLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - c.OverridesByLoggerName = map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Info)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogWritesMessagesToLogGivenOverrideForLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - c.OverridesByLoggerName = map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Info)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - 
assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesMessagesToLogGivenOverrideForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.OverridesByLoggerName = map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Fatal)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - c.OverridesByLoggerName = map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Fatal)}, - } - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideUpdatedForAnotherLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Info)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestLogWritesMessagesToLogGivenOverrideUpdatedForLoggerReducingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Fatal) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Info)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogWritesMessagesToLogGivenOverrideUpdatedForAnotherLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - }) - SetConfig(Config{ - 
OverridesByLoggerName: map[string]Config{ - "not this logger": {Level: NewLogLevelOption(Fatal)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - - assert.Equal(t, logMessage, logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) - // caller is disabled by default - assert.NotContains(t, logLines[0], "logging_test.go") -} - -func TestLogDoesNotWriteMessagesToLogGivenOverrideUpdatedForLoggerRaisingLogLevel(t *testing.T) { - defer clearConfig() - defer clearRegistry("TestLogName") - ctx := context.Background() - logger, logPath := getLogger(t, func(c *Config) { - c.Level = NewLogLevelOption(Info) - }) - SetConfig(Config{ - OverridesByLoggerName: map[string]Config{ - "TestLogName": {Level: NewLogLevelOption(Fatal)}, - }, - }) - logMessage := "test log message" - - logger.Info(ctx, logMessage) - logger.Flush() - - logLines, err := getLogLines(t, logPath) - if err != nil { - t.Fatal(err) - } - - assert.Len(t, logLines, 0) -} - -func TestGetGoLogger(t *testing.T) { - l := GetGoLogger("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) -} - -func TestGetGoLoggerAndApplyConfig(t *testing.T) { - l := GetGoLogger("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) - - b := &bytes.Buffer{} - l.ApplyConfig(Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - Pipe: b, - }) - - l.ZapEventLogger.Info("some info") - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, "some info", logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) -} - -func TestGetGoLoggerV2(t *testing.T) { - l := GetGoLoggerV2("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) -} - -func TestGetGoLoggerV2AndApplyConfig(t *testing.T) { - l := GetGoLoggerV2("TestLogName") - assert.NotNil(t, l.ZapEventLogger) - assert.NotNil(t, l.logger) - - b := &bytes.Buffer{} - l.ApplyConfig(Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - Pipe: b, - }) - - l.ZapEventLogger.Info("some info") - - logLines, err := parseLines(b) - if err != nil { - t.Fatal(err) - } - - if len(logLines) != 1 { - t.Fatalf("expecting exactly 1 log line but got %d lines", len(logLines)) - } - assert.Equal(t, "some info", logLines[0]["msg"]) - assert.Equal(t, "INFO", logLines[0]["level"]) - assert.Equal(t, "TestLogName", logLines[0]["logger"]) -} - -type Option = func(*Config) - -func getLogger(t *testing.T, options ...Option) (Logger, string) { - dir := t.TempDir() - logPath := dir + "/log.txt" - name := "TestLogName" - logConfig := Config{ - EncoderFormat: NewEncoderFormatOption(JSON), - OutputPaths: []string{logPath}, - } - - for _, o := range options { - o(&logConfig) - } - - logger := MustNewLogger(name) - SetConfig(logConfig) - return logger, getFirstOutputPath(logConfig.OutputPaths) -} - -func getFirstOutputPath(outputPaths []string) string { - if len(outputPaths) == 0 { - return stderr - } - return outputPaths[0] -} - -var errloggingToConsole = errors.New("no file to open. 
Logging to console") - -func getLogLines(t *testing.T, logPath string) ([]map[string]any, error) { - if logPath == stderr { - return nil, errloggingToConsole - } - - file, err := os.Open(logPath) - if err != nil { - return nil, err - } - defer func() { - err := file.Close() - if err != nil { - t.Error(err) - } - }() - - return parseLines(file) -} - -func parseLines(r io.Reader) ([]map[string]any, error) { - fileScanner := bufio.NewScanner(r) - - fileScanner.Split(bufio.ScanLines) - - logLines := []map[string]any{} - for fileScanner.Scan() { - loggedLine := make(map[string]any) - err := json.Unmarshal(fileScanner.Bytes(), &loggedLine) - if err != nil { - return nil, err - } - logLines = append(logLines, loggedLine) - } - - return logLines, nil -} - -func clearRegistry(name string) { - for _, logger := range registry[name] { - logger.Flush() - } - registry[name] = []Logger{} -} - -func clearConfig() { - configMutex.Lock() - defer configMutex.Unlock() - - cachedConfig = Config{} -} diff --git a/logging/registry.go b/logging/registry.go deleted file mode 100644 index 9410498a72..0000000000 --- a/logging/registry.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package logging - -import ( - "sync" -) - -var configMutex sync.RWMutex -var cachedConfig Config - -var registryMutex sync.Mutex -var registry = map[string][]Logger{ - "reprovider.simple": {GetGoLogger("reprovider.simple")}, - "badger": {GetGoLoggerV2("badger")}, -} - -func register(name string, logger Logger) { - registryMutex.Lock() - defer registryMutex.Unlock() - - loggers, exists := registry[name] - if !exists { - loggers = []Logger{} - } - loggers = append(loggers, logger) - registry[name] = loggers -} - -func setConfig(newConfig Config) Config { - configMutex.Lock() - defer configMutex.Unlock() - - cachedConfig = cachedConfig.with(newConfig) - return cachedConfig -} - -func updateLoggers(config Config) { - registryMutex.Lock() - defer registryMutex.Unlock() - - for loggerName, loggers := range registry { - newLoggerConfig := config.forLogger(loggerName) - - for _, logger := range loggers { - logger.ApplyConfig(newLoggerConfig) - } - } -} diff --git a/merkle/clock/clock.go b/merkle/clock/clock.go index 2bdc9fda93..3f1ae47cf6 100644 --- a/merkle/clock/clock.go +++ b/merkle/clock/clock.go @@ -19,13 +19,14 @@ import ( cid "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" ) var ( - log = logging.MustNewLogger("merkleclock") + log = corelog.NewLogger("merkleclock") ) // MerkleClock is a MerkleCRDT clock that can be used to read/write events (deltas) to the clock. 
@@ -121,7 +122,6 @@ func (mc *MerkleClock) ProcessNode( nodeCid := node.Cid() priority := delta.GetPriority() - log.Debug(ctx, "Running ProcessNode", logging.NewKV("CID", nodeCid)) err := mc.crdt.Merge(ctx, delta) if err != nil { return NewErrMergingDelta(nodeCid, err) @@ -130,16 +130,13 @@ func (mc *MerkleClock) ProcessNode( links := node.Links() // check if we have any HEAD links hasHeads := false - log.Debug(ctx, "Stepping through node links") for _, l := range links { - log.Debug(ctx, "Checking link", logging.NewKV("Name", l.Name), logging.NewKV("CID", l.Cid)) if l.Name == "_head" { hasHeads = true break } } if !hasHeads { // reached the bottom, at a leaf - log.Debug(ctx, "No heads found") err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { return NewErrAddingHead(nodeCid, err) @@ -148,14 +145,12 @@ func (mc *MerkleClock) ProcessNode( for _, l := range links { linkCid := l.Cid - log.Debug(ctx, "Scanning for replacement heads", logging.NewKV("Child", linkCid)) isHead, err := mc.headset.IsHead(ctx, linkCid) if err != nil { return NewErrCheckingHead(linkCid, err) } if isHead { - log.Debug(ctx, "Found head, replacing!") // reached one of the current heads, replace it with the tip // of current branch err = mc.headset.Replace(ctx, linkCid, nodeCid, priority) @@ -173,14 +168,13 @@ func (mc *MerkleClock) ProcessNode( if known { // we reached a non-head node in the known tree. // This means our root block is a new head - log.Debug(ctx, "Adding head") err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failure adding head (when root is a new head)", err, - logging.NewKV("Root", nodeCid), + corelog.Any("Root", nodeCid), ) // OR should this also return like below comment?? // return nil, errors.Wrap("error adding head (when root is new head): %s ", root, err) diff --git a/merkle/clock/heads.go b/merkle/clock/heads.go index cafc7cb6fa..2bbb04d2d9 100644 --- a/merkle/clock/heads.go +++ b/merkle/clock/heads.go @@ -18,10 +18,10 @@ import ( cid "github.com/ipfs/go-cid" "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" ) // heads manages the current Merkle-CRDT heads. @@ -55,12 +55,12 @@ func (hh *heads) IsHead(ctx context.Context, c cid.Cid) (bool, error) { // Replace replaces a head with a new CID. 
func (hh *heads) Replace(ctx context.Context, old cid.Cid, new cid.Cid, height uint64) error { - log.Info( + log.InfoContext( ctx, "Replacing DAG head", - logging.NewKV("Old", old), - logging.NewKV("CID", new), - logging.NewKV("Height", height)) + corelog.Any("Old", old), + corelog.Any("CID", new), + corelog.Uint64("Height", height)) err := hh.store.Delete(ctx, hh.key(old).ToDS()) if err != nil { @@ -91,7 +91,7 @@ func (hh *heads) List(ctx context.Context) ([]cid.Cid, uint64, error) { defer func() { err := results.Close() if err != nil { - log.ErrorE(ctx, "Error closing results", err) + log.ErrorContextE(ctx, "Error closing results", err) } }() diff --git a/merkle/crdt/composite.go b/merkle/crdt/composite.go index ee43348bdc..f58813235a 100644 --- a/merkle/crdt/composite.go +++ b/merkle/crdt/composite.go @@ -59,7 +59,6 @@ func (m *MerkleCompositeDAG) Delete( ) (ipld.Node, uint64, error) { // Set() call on underlying CompositeDAG CRDT // persist/publish delta - log.Debug(ctx, "Applying delta-mutator 'Delete' on CompositeDAG") delta := m.reg.Set(links) delta.Status = client.Deleted nd, err := m.clock.AddDAGNode(ctx, delta) @@ -78,7 +77,6 @@ func (m *MerkleCompositeDAG) Save(ctx context.Context, data any) (ipld.Node, uin } // Set() call on underlying CompositeDAG CRDT // persist/publish delta - log.Debug(ctx, "Applying delta-mutator 'Set' on CompositeDAG") delta := m.reg.Set(value) nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index b52fb7cf6d..c96791d07c 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -21,11 +21,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" -) - -var ( - log = logging.MustNewLogger("merklecrdt") ) type Stores interface { diff --git a/net/client.go b/net/client.go index 20c33e33fd..414ee62e47 100644 --- a/net/client.go +++ b/net/client.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" - "github.com/sourcenetwork/defradb/logging" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -33,13 +32,6 @@ var ( // pushLog creates a pushLog request and sends it to another node // over libp2p grpc connection func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) error { - log.Debug( - ctx, - "Preparing pushLog request", - logging.NewKV("DocID", evt.DocID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("SchemaRoot", evt.SchemaRoot)) - body := &pb.PushLogRequest_Body{ DocID: []byte(evt.DocID), Cid: evt.Cid.Bytes(), @@ -53,13 +45,6 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er Body: body, } - log.Debug( - ctx, "Pushing log", - logging.NewKV("DocID", evt.DocID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("PeerID", pid), - ) - client, err := s.dial(pid) // grpc dial over P2P stream if err != nil { return NewErrPushLog(err) diff --git a/net/dag.go b/net/dag.go index f083904915..cc20629c0f 100644 --- a/net/dag.go +++ b/net/dag.go @@ -19,8 +19,7 @@ import ( "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" - - "github.com/sourcenetwork/defradb/logging" + "github.com/sourcenetwork/corelog" ) var ( @@ -100,13 +99,6 @@ func (p *Peer) sendJobWorker() { // initialization in New(). 
func (p *Peer) dagWorker(jobs chan *dagJob) { for job := range jobs { - log.Debug( - p.ctx, - "Starting new job from DAG queue", - logging.NewKV("Datastore Key", job.bp.dsKey), - logging.NewKV("CID", job.cid), - ) - select { case <-p.ctx.Done(): // drain jobs from queue when we are done @@ -119,7 +111,11 @@ func (p *Peer) dagWorker(jobs chan *dagJob) { if j.bp.getter != nil && j.cid.Defined() { cNode, err := j.bp.getter.Get(p.ctx, j.cid) if err != nil { - log.ErrorE(p.ctx, "Failed to get node", err, logging.NewKV("CID", j.cid)) + log.ErrorContextE( + p.ctx, + "Failed to get node", + err, + corelog.Any("CID", j.cid)) j.session.Done() return } @@ -130,7 +126,11 @@ func (p *Peer) dagWorker(jobs chan *dagJob) { j.isComposite, ) if err != nil { - log.ErrorE(p.ctx, "Failed to process remote block", err, logging.NewKV("CID", j.cid)) + log.ErrorContextE( + p.ctx, + "Failed to process remote block", + err, + corelog.Any("CID", j.cid)) } } p.queuedChildren.Remove(j.cid) diff --git a/net/dag_test.go b/net/dag_test.go index 524847bfb8..ddd9e9aab3 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -188,7 +188,6 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { var getter ipld.NodeGetter = n2.Peer.newDAGSyncerTxn(txn2) if sessionMaker, ok := getter.(SessionDAGSyncer); ok { - log.Debug(ctx, "Upgrading DAGSyncer with a session") getter = sessionMaker.Session(ctx) } diff --git a/net/net.go b/net/net.go index add509a709..c7dbaf1810 100644 --- a/net/net.go +++ b/net/net.go @@ -12,10 +12,8 @@ package net -import ( - "github.com/sourcenetwork/defradb/logging" -) +import "github.com/sourcenetwork/corelog" var ( - log = logging.MustNewLogger("net") + log = corelog.NewLogger("net") ) diff --git a/net/node.go b/net/node.go index 9245f78772..a52e296712 100644 --- a/net/node.go +++ b/net/node.go @@ -38,6 +38,7 @@ import ( "github.com/libp2p/go-libp2p/core/routing" "github.com/multiformats/go-multiaddr" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/go-libp2p-pubsub-rpc/finalizer" // @TODO: https://github.com/sourcenetwork/defradb/issues/1902 @@ -46,7 +47,6 @@ import ( "github.com/libp2p/go-libp2p/p2p/net/connmgr" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" ) var evtWaitTimeout = 10 * time.Second @@ -144,11 +144,11 @@ func NewNode( if err != nil { return nil, fin.Cleanup(err) } - log.Info( + log.InfoContext( ctx, "Created LibP2P host", - logging.NewKV("PeerId", h.ID()), - logging.NewKV("Address", options.ListenAddresses), + corelog.Any("PeerId", h.ID()), + corelog.Any("Address", options.ListenAddresses), ) var ps *pubsub.PubSub @@ -214,10 +214,10 @@ func (n *Node) Bootstrap(addrs []peer.AddrInfo) { defer wg.Done() err := n.host.Connect(n.ctx, pinfo) if err != nil { - log.Info(n.ctx, "Cannot connect to peer", logging.NewKV("Error", err)) + log.InfoContext(n.ctx, "Cannot connect to peer", corelog.Any("Error", err)) return } - log.Info(n.ctx, "Connected", logging.NewKV("PeerID", pinfo.ID)) + log.InfoContext(n.ctx, "Connected", corelog.Any("PeerID", pinfo.ID)) atomic.AddUint64(&connected, 1) }(pinfo) } @@ -225,12 +225,12 @@ func (n *Node) Bootstrap(addrs []peer.AddrInfo) { wg.Wait() if nPeers := len(addrs); int(connected) < nPeers/2 { - log.Info(n.ctx, fmt.Sprintf("Only connected to %d bootstrap peers out of %d", connected, nPeers)) + log.InfoContext(n.ctx, fmt.Sprintf("Only connected to %d bootstrap peers out of %d", connected, nPeers)) } err := n.dht.Bootstrap(n.ctx) if err != nil { - log.ErrorE(n.ctx, "Problem bootstraping using DHT", err) + 
log.ErrorContextE(n.ctx, "Problem bootstraping using DHT", err) return } } @@ -254,7 +254,7 @@ func (n *Node) PeerInfo() peer.AddrInfo { func (n *Node) subscribeToPeerConnectionEvents() { sub, err := n.host.EventBus().Subscribe(new(event.EvtPeerConnectednessChanged)) if err != nil { - log.Info( + log.InfoContext( n.ctx, fmt.Sprintf("failed to subscribe to peer connectedness changed event: %v", err), ) @@ -276,7 +276,7 @@ func (n *Node) subscribeToPeerConnectionEvents() { func (n *Node) subscribeToPubSubEvents() { sub, err := n.host.EventBus().Subscribe(new(EvtPubSub)) if err != nil { - log.Info( + log.InfoContext( n.ctx, fmt.Sprintf("failed to subscribe to pubsub event: %v", err), ) @@ -298,7 +298,7 @@ func (n *Node) subscribeToPubSubEvents() { func (n *Node) subscribeToPushLogEvents() { sub, err := n.host.EventBus().Subscribe(new(EvtReceivedPushLog)) if err != nil { - log.Info( + log.InfoContext( n.ctx, fmt.Sprintf("failed to subscribe to push log event: %v", err), ) diff --git a/net/peer.go b/net/peer.go index 0c456d5b18..61711b3918 100644 --- a/net/peer.go +++ b/net/peer.go @@ -31,6 +31,7 @@ import ( "github.com/libp2p/go-libp2p/core/peer" peerstore "github.com/libp2p/go-libp2p/core/peerstore" "github.com/libp2p/go-libp2p/core/routing" + "github.com/sourcenetwork/corelog" "google.golang.org/grpc" "github.com/sourcenetwork/defradb/client" @@ -39,7 +40,6 @@ import ( "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/merkle/clock" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -146,11 +146,11 @@ func (p *Peer) Start() error { addr := p.host.Peerstore().PeerInfo(id) err := p.host.Connect(p.ctx, addr) if err != nil { - log.Info( + log.InfoContext( p.ctx, "Failure while reconnecting to a known peer", - logging.NewKV("peer", id), - logging.NewKV("error", err), + corelog.Any("peer", id), + corelog.Any("error", err), ) } }(id) @@ -173,17 +173,20 @@ func (p *Peer) Start() error { } p.updateChannel = updateChannel - log.Info(p.ctx, "Starting internal broadcaster for pubsub network") + log.InfoContext(p.ctx, "Starting internal broadcaster for pubsub network") go p.handleBroadcastLoop() } - log.FeedbackInfo(p.ctx, "Starting P2P node", logging.NewKV("P2P addresses", p.host.Addrs())) + log.InfoContext( + p.ctx, + "Starting P2P node", + corelog.Any("P2P addresses", p.host.Addrs())) // register the P2P gRPC server go func() { pb.RegisterServiceServer(p.p2pRPC, p.server) if err := p.p2pRPC.Serve(p2plistener); err != nil && !errors.Is(err, grpc.ErrServerStopped) { - log.FatalE(p.ctx, "Fatal P2P RPC server error", err) + log.ErrorContextE(p.ctx, "Fatal P2P RPC server error", err) } }() @@ -197,13 +200,13 @@ func (p *Peer) Start() error { func (p *Peer) Close() { // close topics if err := p.server.removeAllPubsubTopics(); err != nil { - log.ErrorE(p.ctx, "Error closing pubsub topics", err) + log.ErrorContextE(p.ctx, "Error closing pubsub topics", err) } // stop gRPC server for _, c := range p.server.conns { if err := c.Close(); err != nil { - log.ErrorE(p.ctx, "Failed closing server RPC connections", err) + log.ErrorContextE(p.ctx, "Failed closing server RPC connections", err) } } stopGRPCServer(p.ctx, p.p2pRPC) @@ -212,12 +215,12 @@ func (p *Peer) Close() { // close event emitters if p.server.pubSubEmitter != nil { if err := p.server.pubSubEmitter.Close(); err != nil { - log.Info(p.ctx, "Could not close pubsub event emitter", logging.NewKV("Error", 
err.Error())) + log.InfoContext(p.ctx, "Could not close pubsub event emitter", corelog.Any("Error", err.Error())) } } if p.server.pushLogEmitter != nil { if err := p.server.pushLogEmitter.Close(); err != nil { - log.Info(p.ctx, "Could not close push log event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(p.ctx, "Could not close push log event emitter", corelog.Any("Error", err.Error())) } } @@ -226,11 +229,11 @@ func (p *Peer) Close() { } if err := p.bserv.Close(); err != nil { - log.ErrorE(p.ctx, "Error closing block service", err) + log.ErrorContextE(p.ctx, "Error closing block service", err) } if err := p.host.Close(); err != nil { - log.ErrorE(p.ctx, "Error closing host", err) + log.ErrorContextE(p.ctx, "Error closing host", err) } p.cancel() @@ -239,9 +242,7 @@ func (p *Peer) Close() { // handleBroadcast loop manages the transition of messages // from the internal broadcaster to the external pubsub network func (p *Peer) handleBroadcastLoop() { - log.Debug(p.ctx, "Waiting for messages on internal broadcaster") for { - log.Debug(p.ctx, "Handling internal broadcast bus message") update, isOpen := <-p.updateChannel if !isOpen { return @@ -255,11 +256,11 @@ func (p *Peer) handleBroadcastLoop() { } else if update.Priority > 1 { err = p.handleDocUpdateLog(update) } else { - log.Info(p.ctx, "Skipping log with invalid priority of 0", logging.NewKV("CID", update.Cid)) + log.InfoContext(p.ctx, "Skipping log with invalid priority of 0", corelog.Any("CID", update.Cid)) } if err != nil { - log.ErrorE(p.ctx, "Error while handling broadcast log", err) + log.ErrorContextE(p.ctx, "Error while handling broadcast log", err) } } } @@ -272,19 +273,13 @@ func (p *Peer) RegisterNewDocument( nd ipld.Node, schemaRoot string, ) error { - log.Debug( - p.ctx, - "Registering a new document for our peer node", - logging.NewKV("DocID", docID.String()), - ) - // register topic if err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { - log.ErrorE( + log.ErrorContextE( p.ctx, "Failed to create new pubsub topic", err, - logging.NewKV("DocID", docID.String()), + corelog.String("DocID", docID.String()), ) return err } @@ -315,7 +310,7 @@ func (p *Peer) pushToReplicator( ) { for docIDResult := range docIDsCh { if docIDResult.Err != nil { - log.ErrorE(ctx, "Key channel error", docIDResult.Err) + log.ErrorContextE(ctx, "Key channel error", docIDResult.Err) continue } docID := core.DataStoreKeyFromDocID(docIDResult.ID) @@ -325,30 +320,30 @@ func (p *Peer) pushToReplicator( ) cids, priority, err := headset.List(ctx) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to get heads", err, - logging.NewKV("DocID", docIDResult.ID.String()), - logging.NewKV("PeerID", pid), - logging.NewKV("Collection", collection.Name())) + corelog.String("DocID", docIDResult.ID.String()), + corelog.Any("PeerID", pid), + corelog.Any("Collection", collection.Name())) continue } // loop over heads, get block, make the required logs, and send for _, c := range cids { blk, err := txn.DAGstore().Get(ctx, c) if err != nil { - log.ErrorE(ctx, "Failed to get block", err, - logging.NewKV("CID", c), - logging.NewKV("PeerID", pid), - logging.NewKV("Collection", collection.Name())) + log.ErrorContextE(ctx, "Failed to get block", err, + corelog.Any("CID", c), + corelog.Any("PeerID", pid), + corelog.Any("Collection", collection.Name())) continue } // @todo: remove encode/decode loop for core.Log data nd, err := dag.DecodeProtobuf(blk.RawData()) if err != nil { - log.ErrorE(ctx, "Failed 
to decode protobuf", err, logging.NewKV("CID", c)) + log.ErrorContextE(ctx, "Failed to decode protobuf", err, corelog.Any("CID", c)) continue } @@ -360,12 +355,12 @@ func (p *Peer) pushToReplicator( Priority: priority, } if err := p.server.pushLog(ctx, evt, pid); err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to replicate log", err, - logging.NewKV("CID", c), - logging.NewKV("PeerID", pid), + corelog.Any("CID", c), + corelog.Any("PeerID", pid), ) } } @@ -397,7 +392,7 @@ func (p *Peer) loadReplicators(ctx context.Context) error { // This will be used during connection and stream creation by libp2p. p.host.Peerstore().AddAddrs(rep.Info.ID, rep.Info.Addrs, peerstore.PermanentAddrTTL) - log.Info(ctx, "loaded replicators from datastore", logging.NewKV("Replicator", rep)) + log.InfoContext(ctx, "loaded replicators from datastore", corelog.Any("Replicator", rep)) } return nil @@ -433,7 +428,7 @@ func (p *Peer) handleDocCreateLog(evt events.Update) error { return err } // push to each peer (replicator) - p.pushLogToReplicators(p.ctx, evt) + p.pushLogToReplicators(evt) return nil } @@ -443,12 +438,6 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { if err != nil { return NewErrFailedToGetDocID(err) } - log.Debug( - p.ctx, - "Preparing pubsub pushLog request from broadcast", - logging.NewKV("DocID", docID), - logging.NewKV("CID", evt.Cid), - logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ DocID: []byte(docID.String()), @@ -464,7 +453,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { } // push to each peer (replicator) - p.pushLogToReplicators(p.ctx, evt) + p.pushLogToReplicators(evt) if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil { return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID) @@ -477,7 +466,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { return nil } -func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { +func (p *Peer) pushLogToReplicators(lg events.Update) { // push to each peer (replicator) peers := make(map[string]struct{}) for _, peer := range p.ps.ListPeers(lg.DocID) { @@ -500,13 +489,13 @@ func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { } go func(peerID peer.ID) { if err := p.server.pushLog(p.ctx, lg, peerID); err != nil { - log.ErrorE( + log.ErrorContextE( p.ctx, "Failed pushing log", err, - logging.NewKV("DocID", lg.DocID), - logging.NewKV("CID", lg.Cid), - logging.NewKV("PeerID", peerID)) + corelog.String("DocID", lg.DocID), + corelog.Any("CID", lg.Cid), + corelog.Any("PeerID", peerID)) } }(pid) } @@ -532,7 +521,7 @@ func (p *Peer) newDAGSyncerTxn(txn datastore.Txn) ipld.DAGService { func (p *Peer) Session(ctx context.Context) ipld.NodeGetter { ng := dag.NewSession(ctx, p.DAGService) if ng == p.DAGService { - log.Info(ctx, "DAGService does not support sessions") + log.InfoContext(ctx, "DAGService does not support sessions") } return ng } @@ -547,7 +536,7 @@ func stopGRPCServer(ctx context.Context, server *grpc.Server) { select { case <-timer.C: server.Stop() - log.Info(ctx, "Peer gRPC server was shutdown ungracefully") + log.InfoContext(ctx, "Peer gRPC server was shutdown ungracefully") case <-stopped: timer.Stop() } diff --git a/net/process.go b/net/process.go index 5eec8a6efd..6779ada29f 100644 --- a/net/process.go +++ b/net/process.go @@ -22,13 +22,13 @@ import ( blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/corelog" 
"github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" merklecrdt "github.com/sourcenetwork/defradb/merkle/crdt" ) @@ -65,12 +65,12 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { nd := e.Value.(ipld.Node) err := bp.processBlock(ctx, nd, "") if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process block", err, - logging.NewKV("DocID", bp.dsKey.DocID), - logging.NewKV("CID", nd.Cid()), + corelog.String("DocID", bp.dsKey.DocID), + corelog.Any("CID", nd.Cid()), ) } } @@ -78,7 +78,7 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { // processBlock merges the block and its children to the datastore and sets the head accordingly. func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field string) error { - crdt, err := initCRDTForType(ctx, bp.txn, bp.col, bp.dsKey, field) + crdt, err := initCRDTForType(bp.txn, bp.col, bp.dsKey, field) if err != nil { return err } @@ -107,12 +107,12 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field } if err := bp.processBlock(ctx, nd, link.Name); err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process block", err, - logging.NewKV("DocID", bp.dsKey.DocID), - logging.NewKV("CID", nd.Cid()), + corelog.String("DocID", bp.dsKey.DocID), + corelog.Any("CID", nd.Cid()), ) } } @@ -121,7 +121,6 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field } func initCRDTForType( - ctx context.Context, txn datastore.Txn, col client.Collection, dsKey core.DataStoreKey, @@ -131,7 +130,6 @@ func initCRDTForType( var ctype client.CType description := col.Description() if field == "" { // empty field name implies composite type - ctype = client.COMPOSITE key = base.MakeDataStoreKeyWithCollectionDescription( description, ).WithInstanceInfo( @@ -140,7 +138,6 @@ func initCRDTForType( core.COMPOSITE_NAMESPACE, ) - log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.NewMerkleCompositeDAG( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), @@ -157,7 +154,6 @@ func initCRDTForType( fieldID := fd.ID.String() key = base.MakeDataStoreKeyWithCollectionDescription(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) - log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.InstanceWithStore( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), @@ -183,8 +179,6 @@ func (bp *blockProcessor) processRemoteBlock( nd ipld.Node, isComposite bool, ) error { - log.Debug(ctx, "Running processLog") - if err := bp.txn.DAGstore().Put(ctx, nd); err != nil { return err } @@ -218,15 +212,14 @@ func (bp *blockProcessor) handleChildBlocks( exist, err := bp.txn.DAGstore().Has(ctx, link.Cid) if err != nil { - log.Error( + log.ErrorContext( ctx, "Failed to check for existing block", - logging.NewKV("CID", link.Cid), - logging.NewKV("ERROR", err), + corelog.Any("CID", link.Cid), + corelog.Any("ERROR", err), ) } if exist { - log.Debug(ctx, "Already have block locally, skipping.", logging.NewKV("CID", link.Cid)) continue } diff --git a/net/server.go b/net/server.go index 206ccb3b53..41cfd3625b 100644 --- a/net/server.go +++ b/net/server.go @@ -21,6 +21,7 @@ import ( format "github.com/ipfs/go-ipld-format" 
"github.com/libp2p/go-libp2p/core/event" libpeer "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" "github.com/sourcenetwork/immutable" "google.golang.org/grpc" @@ -32,7 +33,6 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -96,7 +96,6 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) } // Get all DocIDs across all collections in the DB - log.Debug(p.ctx, "Getting all existing DocIDs...") cols, err := s.db.GetCollections(s.peer.ctx, client.CollectionFetchOptions{}) if err != nil { return nil, err @@ -114,28 +113,22 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) } for docID := range docIDChan { - log.Debug( - p.ctx, - "Registering existing DocID pubsub topic", - logging.NewKV("DocID", docID.ID.String()), - ) if err := s.addPubSubTopic(docID.ID.String(), true); err != nil { return nil, err } i++ } } - log.Debug(p.ctx, "Finished registering all DocID pubsub topics", logging.NewKV("Count", i)) } var err error s.pubSubEmitter, err = s.peer.host.EventBus().Emitter(new(EvtPubSub)) if err != nil { - log.Info(s.peer.ctx, "could not create event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not create event emitter", corelog.String("Error", err.Error())) } s.pushLogEmitter, err = s.peer.host.EventBus().Emitter(new(EvtReceivedPushLog)) if err != nil { - log.Info(s.peer.ctx, "could not create event emitter", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not create event emitter", corelog.String("Error", err.Error())) } return s, nil @@ -200,8 +193,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { return nil, err } - log.Debug(ctx, "Received a PushLog request", logging.NewKV("PeerID", pid)) - cid, err := cid.Cast(req.Body.Cid) if err != nil { return nil, err @@ -217,7 +208,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if s.pushLogEmitter != nil { byPeer, err := libpeer.Decode(req.Body.Creator) if err != nil { - log.Info(ctx, "could not decode the PeerID of the log creator", logging.NewKV("Error", err.Error())) + log.InfoContext(ctx, "could not decode the PeerID of the log creator", corelog.String("Error", err.Error())) } err = s.pushLogEmitter.Emit(EvtReceivedPushLog{ FromPeer: pid, @@ -226,7 +217,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { // logging instead of returning an error because the event bus should // not break the PushLog execution. 
- log.Info(ctx, "could not emit push log event", logging.NewKV("Error", err.Error())) + log.InfoContext(ctx, "could not emit push log event", corelog.String("Error", err.Error())) } } }() @@ -243,7 +234,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, errors.Wrap(fmt.Sprintf("failed to check for existing block %s", cid), err) } if exists { - log.Debug(ctx, fmt.Sprintf("Already have block %s locally, skipping.", cid)) return &pb.PushLogReply{}, nil } @@ -270,7 +260,6 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL // Create a new DAG service with the current transaction var getter format.NodeGetter = s.peer.newDAGSyncerTxn(txn) if sessionMaker, ok := getter.(SessionDAGSyncer); ok { - log.Debug(ctx, "Upgrading DAGSyncer with a session") getter = sessionMaker.Session(ctx) } @@ -284,12 +273,12 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL bp := newBlockProcessor(s.peer, txn, col, dsKey, getter) err = bp.processRemoteBlock(ctx, &session, nd, true) if err != nil { - log.ErrorE( + log.ErrorContextE( ctx, "Failed to process remote block", err, - logging.NewKV("DocID", dsKey.DocID), - logging.NewKV("CID", cid), + corelog.String("DocID", dsKey.DocID), + corelog.Any("CID", cid), ) } session.Wait() @@ -490,32 +479,14 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe if _, err := t.Publish(ctx, data, rpc.WithIgnoreResponse(true)); err != nil { return errors.Wrap(fmt.Sprintf("failed publishing to thread %s", topic), err) } - - cid, err := cid.Cast(req.Body.Cid) - if err != nil { - return err - } - - log.Debug( - ctx, - "Published log", - logging.NewKV("CID", cid), - logging.NewKV("DocID", topic), - ) return nil } // pubSubMessageHandler handles incoming PushLog messages from the pubsub network. func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) ([]byte, error) { - log.Debug( - s.peer.ctx, - "Handling new pubsub message", - logging.NewKV("SenderID", from), - logging.NewKV("Topic", topic), - ) req := new(pb.PushLogRequest) if err := proto.Unmarshal(msg, req); err != nil { - log.ErrorE(s.peer.ctx, "Failed to unmarshal pubsub message %s", err) + log.ErrorContextE(s.peer.ctx, "Failed to unmarshal pubsub message %s", err) return nil, err } @@ -523,7 +494,6 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) Addr: addr{from}, }) if _, err := s.PushLog(ctx, req); err != nil { - log.ErrorE(ctx, "Failed pushing log for doc", err, logging.NewKV("Topic", topic)) return nil, errors.Wrap(fmt.Sprintf("Failed pushing log for doc %s", topic), err) } return nil, nil @@ -531,12 +501,12 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) // pubSubEventHandler logs events from the subscribed DocID topics. 
func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { - log.Info( + log.InfoContext( s.peer.ctx, "Received new pubsub event", - logging.NewKV("SenderId", from), - logging.NewKV("Topic", topic), - logging.NewKV("Message", string(msg)), + corelog.Any("SenderId", from), + corelog.String("Topic", topic), + corelog.String("Message", string(msg)), ) if s.pubSubEmitter != nil { @@ -544,7 +514,7 @@ func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { Peer: from, }) if err != nil { - log.Info(s.peer.ctx, "could not emit pubsub event", logging.NewKV("Error", err.Error())) + log.InfoContext(s.peer.ctx, "could not emit pubsub event", corelog.Any("Error", err.Error())) } } } diff --git a/node/node.go b/node/node.go index 89bedd56ff..9524247bf8 100644 --- a/node/node.go +++ b/node/node.go @@ -17,15 +17,15 @@ import ( gohttp "net/http" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/http" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" ) -var log = logging.MustNewLogger("node") +var log = corelog.NewLogger("node") // Options contains start configuration values. type Options struct { @@ -166,10 +166,10 @@ func (n *Node) Start(ctx context.Context) error { if err != nil { return err } - log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", n.Server.Address())) + log.InfoContext(ctx, fmt.Sprintf("Providing HTTP API at %s.", n.Server.Address())) go func() { if err := n.Server.Serve(); err != nil && !errors.Is(err, gohttp.ErrServerClosed) { - log.FeedbackErrorE(ctx, "HTTP server stopped", err) + log.ErrorContextE(ctx, "HTTP server stopped", err) } }() } diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index d7c00bd664..dac81d0ce2 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -20,10 +20,10 @@ import ( ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/badger/v4" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/tests/bench/fixtures" testutils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -35,12 +35,10 @@ const ( var ( storage string = "memory" - log = logging.MustNewLogger("tests.bench") + log = corelog.NewLogger("tests.bench") ) func init() { - logging.SetConfig(logging.Config{Level: logging.NewLogLevelOption(logging.Error)}) - // assign if not empty if s := os.Getenv(storageEnvName); s != "" { storage = s @@ -174,10 +172,10 @@ func BackfillBenchmarkDB( for { if err := cols[j].Create(ctx, doc); err != nil && err.Error() == badger.ErrConflict.Error() { - log.Info( + log.InfoContext( ctx, "Failed to commit TX for doc %s, retrying...\n", - logging.NewKV("DocID", doc.ID()), + corelog.Any("DocID", doc.ID()), ) continue } else if err != nil { diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 10bd98ca99..7f58fbe0dd 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -16,6 +16,7 @@ import ( "testing" badger "github.com/sourcenetwork/badger/v4" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" @@ -23,10 +24,9 @@ import ( "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" httpapi "github.com/sourcenetwork/defradb/http" - 
"github.com/sourcenetwork/defradb/logging" ) -var log = logging.MustNewLogger("cli") +var log = corelog.NewLogger("cli") type defraInstance struct { db client.DB @@ -39,9 +39,9 @@ func (di *defraInstance) close(ctx context.Context) { } func start(ctx context.Context) (*defraInstance, error) { - log.FeedbackInfo(ctx, "Starting DefraDB service...") + log.InfoContext(ctx, "Starting DefraDB service...") - log.FeedbackInfo(ctx, "Building new memory store") + log.InfoContext(ctx, "Building new memory store") opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 0b9c4c2dab..eb44744e57 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -15,12 +15,12 @@ import ( "sort" "testing" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" ) var ( @@ -151,7 +151,7 @@ func assertExplainRequestResults( // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). resultantData := actualResult.Data.([]map[string]any) - log.Info(s.ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) + log.InfoContext(s.ctx, "", corelog.Any("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 3ba5fc7f26..58857e5b94 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -15,20 +15,20 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" coreDB "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" netutils "github.com/sourcenetwork/defradb/net/utils" testutils "github.com/sourcenetwork/defradb/tests/integration" ) var ( - log = logging.MustNewLogger("test.net") + log = corelog.NewLogger("test.net") ) const ( @@ -75,7 +75,7 @@ func setupDefraNode( ) (*net.Node, []client.DocID, error) { ctx := context.Background() - log.Info(ctx, "Building new memory store") + log.InfoContext(ctx, "Building new memory store") db, err := testutils.NewBadgerMemoryDB(ctx, coreDB.WithUpdateEvents()) if err != nil { return nil, nil, err @@ -102,16 +102,16 @@ func setupDefraNode( // parse peers and bootstrap if len(peers) != 0 { - log.Info(ctx, "Parsing bootstrap peers", logging.NewKV("Peers", peers)) + log.InfoContext(ctx, "Parsing bootstrap peers", corelog.Any("Peers", peers)) addrs, err := netutils.ParsePeers(peers) if err != nil { return nil, nil, errors.Wrap(fmt.Sprintf("failed to parse bootstrap peers %v", peers), err) } - log.Info(ctx, "Bootstrapping with peers", logging.NewKV("Addresses", addrs)) + log.InfoContext(ctx, "Bootstrapping with peers", corelog.Any("Addresses", addrs)) n.Bootstrap(addrs) } - log.Info(ctx, "Starting P2P node", logging.NewKV("P2P addresses", n.PeerInfo().Addrs)) + log.InfoContext(ctx, "Starting P2P node", corelog.Any("P2P addresses", n.PeerInfo().Addrs)) 
if err := n.Start(); err != nil { n.Close() return nil, nil, errors.Wrap("unable to start P2P listeners", err) @@ -191,12 +191,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { nodes := []*net.Node{} for i, cfg := range test.NodeConfig { - log.Info(ctx, fmt.Sprintf("Setting up node %d", i)) + log.InfoContext(ctx, fmt.Sprintf("Setting up node %d", i)) var peerAddresses []string if peers, ok := test.NodePeers[i]; ok { for _, p := range peers { if p >= len(nodes) { - log.Info(ctx, "cannot set a peer that hasn't been started. Skipping to next peer") + log.InfoContext(ctx, "cannot set a peer that hasn't been started. Skipping to next peer") continue } peerInfo := nodes[p].PeerInfo() @@ -226,10 +226,10 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if i == j { continue } - log.Info(ctx, fmt.Sprintf("Waiting for node %d to connect with peer %d", i, j)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to connect with peer %d", i, j)) err := n.WaitForPubSubEvent(p.PeerID()) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d connected to peer %d", i, j)) + log.InfoContext(ctx, fmt.Sprintf("Node %d connected to peer %d", i, j)) } } } @@ -237,13 +237,13 @@ func executeTestCase(t *testing.T, test P2PTestCase) { // update and sync peers for n, updateMap := range test.Updates { if n >= len(nodes) { - log.Info(ctx, "cannot update a node that hasn't been started. Skipping to next node") + log.InfoContext(ctx, "cannot update a node that hasn't been started. Skipping to next node") continue } for d, updates := range updateMap { for _, update := range updates { - log.Info(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) + log.InfoContext(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) err := updateDocument(ctx, nodes[n].DB, docIDs[d], update) require.NoError(t, err) @@ -252,10 +252,10 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if n2 == n { continue } - log.Info(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", n2, n)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", n2, n)) err := p.WaitForPushLogByPeerEvent(nodes[n].PeerInfo().ID) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d synced", n2)) + log.InfoContext(ctx, fmt.Sprintf("Node %d synced", n2)) } } } @@ -266,7 +266,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { continue } if n2 >= len(nodes) { - log.Info(ctx, "cannot check results of a node that hasn't been started. Skipping to next node") + log.InfoContext(ctx, "cannot check results of a node that hasn't been started. 
Skipping to next node") continue } @@ -308,10 +308,10 @@ func executeTestCase(t *testing.T, test P2PTestCase) { require.NoError(t, err) } for _, rep := range reps { - log.Info(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", rep, n)) + log.InfoContext(ctx, fmt.Sprintf("Waiting for node %d to sync with peer %d", rep, n)) err := nodes[rep].WaitForPushLogByPeerEvent(nodes[n].PeerID()) require.NoError(t, err) - log.Info(ctx, fmt.Sprintf("Node %d synced", rep)) + log.InfoContext(ctx, fmt.Sprintf("Node %d synced", rep)) for docID, results := range test.ReplicatorResult[rep] { for field, result := range results { diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index 4d48cb033b..7c6a919373 100644 --- a/tests/integration/p2p.go +++ b/tests/integration/p2p.go @@ -14,11 +14,11 @@ import ( "time" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" "github.com/sourcenetwork/defradb/tests/clients" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -149,7 +149,7 @@ func connectPeers( targetNode := s.nodes[cfg.TargetNodeID] addrs := []peer.AddrInfo{targetNode.PeerInfo()} - log.Info(s.ctx, "Bootstrapping with peers", logging.NewKV("Addresses", addrs)) + log.InfoContext(s.ctx, "Bootstrapping with peers", corelog.Any("Addresses", addrs)) sourceNode.Bootstrap(addrs) // Bootstrap triggers a bunch of async stuff for which we have no good way of waiting on. It must be diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 40b2c81d86..930b429119 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -22,6 +22,7 @@ import ( "github.com/bxcodec/faker/support/slice" "github.com/fxamacker/cbor/v2" "github.com/libp2p/go-libp2p/core/crypto" + "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -30,7 +31,6 @@ import ( "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" "github.com/sourcenetwork/defradb/request/graphql" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" @@ -69,7 +69,7 @@ const ( ) var ( - log = logging.MustNewLogger("tests.integration") + log = corelog.NewLogger("tests.integration") mutationType MutationType ) @@ -174,18 +174,18 @@ func executeTestCase( dbt DatabaseType, clientType ClientType, ) { - log.Info( + log.InfoContext( ctx, testCase.Description, - logging.NewKV("database", dbt), - logging.NewKV("client", clientType), - logging.NewKV("mutationType", mutationType), - logging.NewKV("databaseDir", databaseDir), - logging.NewKV("changeDetector.Enabled", changeDetector.Enabled), - logging.NewKV("changeDetector.SetupOnly", changeDetector.SetupOnly), - logging.NewKV("changeDetector.SourceBranch", changeDetector.SourceBranch), - logging.NewKV("changeDetector.TargetBranch", changeDetector.TargetBranch), - logging.NewKV("changeDetector.Repository", changeDetector.Repository), + corelog.Any("database", dbt), + corelog.Any("client", clientType), + corelog.Any("mutationType", mutationType), + corelog.String("databaseDir", databaseDir), + corelog.Bool("changeDetector.Enabled", changeDetector.Enabled), + corelog.Bool("changeDetector.SetupOnly", 
changeDetector.SetupOnly), + corelog.String("changeDetector.SourceBranch", changeDetector.SourceBranch), + corelog.String("changeDetector.TargetBranch", changeDetector.TargetBranch), + corelog.String("changeDetector.Repository", changeDetector.Repository), ) startActionIndex, endActionIndex := getActionRange(t, testCase) @@ -780,7 +780,7 @@ func configureNode( n, err = net.NewNode(s.ctx, db, nodeOpts...) require.NoError(s.t, err) - log.Info(s.ctx, "Starting P2P node", logging.NewKV("P2P address", n.PeerInfo())) + log.InfoContext(s.ctx, "Starting P2P node", corelog.Any("P2P address", n.PeerInfo())) if err := n.Start(); err != nil { n.Close() require.NoError(s.t, err) @@ -1755,7 +1755,7 @@ func assertRequestResults( return true } - log.Info(s.ctx, "", logging.NewKV("RequestResults", result.Data)) + log.InfoContext(s.ctx, "", corelog.Any("RequestResults", result.Data)) // compare results require.Equal(s.t, len(expectedResults), len(resultantData), From 85eca120998fe681729fa0e5b326544a2a8d1592 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Tue, 19 Mar 2024 13:54:23 -0400 Subject: [PATCH 05/49] refactor: Unify Field Kind and Schema properties (#2414) ## Relevant issue(s) Resolves #2409 ## Description Unifies the Field Kind and Schema properties. --- client/definitions.go | 5 - client/document.go | 14 +- client/schema_field_description.go | 316 ++++++++++--- db/backup.go | 11 +- db/collection.go | 25 +- db/collection_update.go | 2 +- db/errors.go | 21 +- db/index.go | 6 +- db/schema.go | 77 +--- .../i2409-unify-field-kind-and-schema.md | 3 + planner/mapper/mapper.go | 2 +- planner/type_join.go | 4 +- request/graphql/schema/collection.go | 38 +- request/graphql/schema/descriptions.go | 25 - request/graphql/schema/descriptions_test.go | 24 +- request/graphql/schema/generate.go | 16 +- tests/gen/gen_auto.go | 7 +- tests/gen/gen_auto_configurator.go | 33 +- tests/gen/gen_auto_test.go | 24 +- .../events/simple/with_update_test.go | 4 +- .../mutation/create/with_version_test.go | 2 +- .../peer/subscribe/with_add_get_test.go | 2 +- .../simple/replicator/with_create_test.go | 4 +- .../integration/query/commits/simple_test.go | 50 +- .../query/commits/with_cid_test.go | 8 +- .../query/commits/with_depth_test.go | 34 +- .../query/commits/with_doc_id_cid_test.go | 4 +- .../query/commits/with_doc_id_count_test.go | 6 +- .../query/commits/with_doc_id_field_test.go | 4 +- .../commits/with_doc_id_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_limit_test.go | 4 +- .../with_doc_id_order_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_order_test.go | 66 +-- .../query/commits/with_doc_id_test.go | 46 +- .../commits/with_doc_id_typename_test.go | 6 +- .../query/commits/with_field_test.go | 8 +- .../query/commits/with_group_test.go | 16 +- .../latest_commits/with_doc_id_field_test.go | 8 +- .../query/latest_commits/with_doc_id_test.go | 10 +- .../query/one_to_many/with_cid_doc_id_test.go | 8 +- .../query/simple/with_cid_doc_id_test.go | 16 +- .../query/simple/with_version_test.go | 38 +- tests/integration/schema/crdt_type_test.go | 4 +- tests/integration/schema/get_schema_test.go | 58 +-- .../schema/migrations/query/simple_test.go | 68 +-- .../migrations/query/with_doc_id_test.go | 8 +- .../schema/migrations/query/with_p2p_test.go | 20 +- .../migrations/query/with_restart_test.go | 8 +- .../migrations/query/with_set_default_test.go | 10 +- .../schema/migrations/query/with_txn_test.go | 8 +- .../migrations/query/with_update_test.go | 8 +- .../schema/migrations/simple_test.go | 8 +- 
tests/integration/schema/simple_test.go | 2 +- .../updates/add/field/create_update_test.go | 8 +- .../field/kind/foreign_object_array_test.go | 158 ++----- .../add/field/kind/foreign_object_test.go | 436 +----------------- .../updates/add/field/kind/invalid_test.go | 6 +- .../schema/updates/add/field/simple_test.go | 8 +- .../schema/updates/copy/field/simple_test.go | 4 +- .../schema/updates/move/simple_test.go | 2 +- .../updates/remove/fields/simple_test.go | 29 -- .../schema/updates/test/field/simple_test.go | 4 +- .../schema/updates/with_schema_branch_test.go | 30 +- .../schema/with_update_set_default_test.go | 4 +- tests/integration/utils2.go | 1 + tests/predefined/gen_predefined.go | 8 +- 66 files changed, 721 insertions(+), 1184 deletions(-) create mode 100644 docs/data_format_changes/i2409-unify-field-kind-and-schema.md diff --git a/client/definitions.go b/client/definitions.go index e521a69fcf..a8ee52d5af 100644 --- a/client/definitions.go +++ b/client/definitions.go @@ -78,10 +78,6 @@ type FieldDefinition struct { // Must contain a valid value. It is currently immutable. Kind FieldKind - // Schema contains the schema name of the type this field contains if this field is - // a relation field. Otherwise this will be empty. - Schema string - // RelationName the name of the relationship that this field represents if this field is // a relation field. Otherwise this will be empty. RelationName string @@ -102,7 +98,6 @@ func NewFieldDefinition(local CollectionFieldDescription, global SchemaFieldDesc Name: global.Name, ID: local.ID, Kind: global.Kind, - Schema: global.Schema, RelationName: global.RelationName, Typ: global.Typ, IsPrimaryRelation: global.IsPrimaryRelation, diff --git a/client/document.go b/client/document.go index 6c837260ba..c2ca6c90a1 100644 --- a/client/document.go +++ b/client/document.go @@ -198,6 +198,14 @@ func validateFieldSchema(val any, field SchemaFieldDescription) (any, error) { } } + if field.Kind.IsObjectArray() { + return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) + } + + if field.Kind.IsObject() { + return getString(val) + } + switch field.Kind { case FieldKind_DocID, FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB: return getString(val) @@ -238,12 +246,6 @@ func validateFieldSchema(val any, field SchemaFieldDescription) (any, error) { case FieldKind_NILLABLE_INT_ARRAY: return getNillableArray(val, getInt64) - case FieldKind_FOREIGN_OBJECT: - return getString(val) - - case FieldKind_FOREIGN_OBJECT_ARRAY: - return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) - case FieldKind_NILLABLE_JSON: return getJSON(val) } diff --git a/client/schema_field_description.go b/client/schema_field_description.go index 05b7f99a5e..3fa2c6bc1c 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -10,10 +10,36 @@ package client -import "fmt" +import ( + "encoding/json" + "strconv" + "strings" +) // FieldKind describes the type of a field. -type FieldKind uint8 +type FieldKind interface { + // String returns the string representation of this FieldKind. + String() string + + // Underlying returns the underlying Kind as a string. + // + // If this is an array, it will return the element kind, else it will return the same as + // [String()]. + Underlying() string + + // IsNillable returns true if this kind supports nil values. + IsNillable() bool + + // IsObject returns true if this FieldKind is an object type, or an array of object types.
+ IsObject() bool + + // IsObjectArray returns true if this FieldKind is an object array type. + IsObjectArray() bool + + // IsArray returns true if this FieldKind is an array type which includes inline arrays as well + // as relation arrays. + IsArray() bool +} // SchemaFieldDescription describes a field on a Schema and its associated metadata. type SchemaFieldDescription struct { @@ -27,10 +53,6 @@ type SchemaFieldDescription struct { // Must contain a valid value. It is currently immutable. Kind FieldKind - // Schema contains the schema name of the type this field contains if this field is - // a relation field. Otherwise this will be empty. - Schema string - // RelationName the name of the relationship that this field represents if this field is // a relation field. Otherwise this will be empty. RelationName string @@ -44,99 +66,188 @@ type SchemaFieldDescription struct { IsPrimaryRelation bool } -func (f FieldKind) String() string { - switch f { +// ScalarKind represents singular scalar field kinds, such as `Int`. +type ScalarKind uint8 + +// ScalarArrayKind represents arrays of simple scalar field kinds, such as `[Int]`. +type ScalarArrayKind uint8 + +// ObjectKind represents singular objects (foreign and embedded), such as `User`. +type ObjectKind string + +// ObjectArrayKind represents arrays of objects (foreign and embedded), such as `[User]`. +type ObjectArrayKind string + +var _ FieldKind = ScalarKind(0) +var _ FieldKind = ScalarArrayKind(0) +var _ FieldKind = ObjectKind("") +var _ FieldKind = ObjectArrayKind("") + +func (k ScalarKind) String() string { + switch k { case FieldKind_DocID: return "ID" case FieldKind_NILLABLE_BOOL: return "Boolean" + case FieldKind_NILLABLE_INT: + return "Int" + case FieldKind_NILLABLE_DATETIME: + return "DateTime" + case FieldKind_NILLABLE_FLOAT: + return "Float" + case FieldKind_NILLABLE_STRING: + return "String" + case FieldKind_NILLABLE_BLOB: + return "Blob" + case FieldKind_NILLABLE_JSON: + return "JSON" + default: + return strconv.Itoa(int(k)) + } +} + +func (k ScalarKind) Underlying() string { + return k.String() +} + +func (k ScalarKind) IsNillable() bool { + return k != FieldKind_DocID +} + +func (k ScalarKind) IsObject() bool { + return false +} + +func (k ScalarKind) IsObjectArray() bool { + return false +} + +func (k ScalarKind) IsArray() bool { + return false +} + +func (k ScalarArrayKind) String() string { + switch k { case FieldKind_NILLABLE_BOOL_ARRAY: return "[Boolean]" case FieldKind_BOOL_ARRAY: return "[Boolean!]" - case FieldKind_NILLABLE_INT: - return "Int" case FieldKind_NILLABLE_INT_ARRAY: return "[Int]" case FieldKind_INT_ARRAY: return "[Int!]" - case FieldKind_NILLABLE_DATETIME: - return "DateTime" - case FieldKind_NILLABLE_FLOAT: - return "Float" case FieldKind_NILLABLE_FLOAT_ARRAY: return "[Float]" case FieldKind_FLOAT_ARRAY: return "[Float!]" - case FieldKind_NILLABLE_STRING: - return "String" case FieldKind_NILLABLE_STRING_ARRAY: return "[String]" case FieldKind_STRING_ARRAY: return "[String!]" - case FieldKind_NILLABLE_BLOB: - return "Blob" - case FieldKind_NILLABLE_JSON: - return "JSON" default: - return fmt.Sprint(uint8(f)) + return strconv.Itoa(int(k)) } } -// IsObject returns true if this FieldKind is an object type.
-func (f FieldKind) IsObject() bool { - return f == FieldKind_FOREIGN_OBJECT || - f == FieldKind_FOREIGN_OBJECT_ARRAY +func (k ScalarArrayKind) Underlying() string { + return strings.Trim(k.String(), "[]") +} + +func (k ScalarArrayKind) IsNillable() bool { + return k == FieldKind_NILLABLE_BOOL_ARRAY || + k == FieldKind_NILLABLE_INT_ARRAY || + k == FieldKind_NILLABLE_FLOAT_ARRAY || + k == FieldKind_NILLABLE_STRING_ARRAY +} + +func (k ScalarArrayKind) IsObject() bool { + return false +} + +func (k ScalarArrayKind) IsObjectArray() bool { + return false +} + +func (k ScalarArrayKind) IsArray() bool { + return true +} + +func (k ObjectKind) String() string { + return string(k) +} + +func (k ObjectKind) Underlying() string { + return k.String() +} + +func (k ObjectKind) IsNillable() bool { + return true +} + +func (k ObjectKind) IsObject() bool { + return true +} + +func (k ObjectKind) IsObjectArray() bool { + return false +} + +func (k ObjectKind) IsArray() bool { + return false } -// IsObjectArray returns true if this FieldKind is an object array type. -func (f FieldKind) IsObjectArray() bool { - return f == FieldKind_FOREIGN_OBJECT_ARRAY +func (k ObjectArrayKind) String() string { + return "[" + string(k) + "]" } -// IsArray returns true if this FieldKind is an array type which includes inline arrays as well -// as relation arrays. -func (f FieldKind) IsArray() bool { - return f == FieldKind_BOOL_ARRAY || - f == FieldKind_INT_ARRAY || - f == FieldKind_FLOAT_ARRAY || - f == FieldKind_STRING_ARRAY || - f == FieldKind_FOREIGN_OBJECT_ARRAY || - f == FieldKind_NILLABLE_BOOL_ARRAY || - f == FieldKind_NILLABLE_INT_ARRAY || - f == FieldKind_NILLABLE_FLOAT_ARRAY || - f == FieldKind_NILLABLE_STRING_ARRAY +func (k ObjectArrayKind) Underlying() string { + return strings.Trim(k.String(), "[]") +} + +func (k ObjectArrayKind) IsNillable() bool { + return true +} + +func (k ObjectArrayKind) IsObject() bool { + return true +} + +func (k ObjectArrayKind) IsObjectArray() bool { + return true +} + +func (k ObjectArrayKind) IsArray() bool { + return true +} + +func (k ObjectArrayKind) MarshalJSON() ([]byte, error) { + return []byte(`"` + k.String() + `"`), nil } // Note: These values are serialized and persisted in the database, avoid modifying existing values. 
const ( - FieldKind_None FieldKind = 0 - FieldKind_DocID FieldKind = 1 - FieldKind_NILLABLE_BOOL FieldKind = 2 - FieldKind_BOOL_ARRAY FieldKind = 3 - FieldKind_NILLABLE_INT FieldKind = 4 - FieldKind_INT_ARRAY FieldKind = 5 - FieldKind_NILLABLE_FLOAT FieldKind = 6 - FieldKind_FLOAT_ARRAY FieldKind = 7 - _ FieldKind = 8 // safe to repurpose (was never used) - _ FieldKind = 9 // safe to repurpose (previously old field) - FieldKind_NILLABLE_DATETIME FieldKind = 10 - FieldKind_NILLABLE_STRING FieldKind = 11 - FieldKind_STRING_ARRAY FieldKind = 12 - FieldKind_NILLABLE_BLOB FieldKind = 13 - FieldKind_NILLABLE_JSON FieldKind = 14 - _ FieldKind = 15 // safe to repurpose (was never used) - - // Embedded object, but accessed via foreign keys - FieldKind_FOREIGN_OBJECT FieldKind = 16 - - // Array of embedded objects, accessed via foreign keys - FieldKind_FOREIGN_OBJECT_ARRAY FieldKind = 17 - - FieldKind_NILLABLE_BOOL_ARRAY FieldKind = 18 - FieldKind_NILLABLE_INT_ARRAY FieldKind = 19 - FieldKind_NILLABLE_FLOAT_ARRAY FieldKind = 20 - FieldKind_NILLABLE_STRING_ARRAY FieldKind = 21 + FieldKind_None ScalarKind = 0 + FieldKind_DocID ScalarKind = 1 + FieldKind_NILLABLE_BOOL ScalarKind = 2 + FieldKind_BOOL_ARRAY ScalarArrayKind = 3 + FieldKind_NILLABLE_INT ScalarKind = 4 + FieldKind_INT_ARRAY ScalarArrayKind = 5 + FieldKind_NILLABLE_FLOAT ScalarKind = 6 + FieldKind_FLOAT_ARRAY ScalarArrayKind = 7 + _ ScalarKind = 8 // safe to repurpose (was never used) + _ ScalarKind = 9 // safe to repurpose (previously old field) + FieldKind_NILLABLE_DATETIME ScalarKind = 10 + FieldKind_NILLABLE_STRING ScalarKind = 11 + FieldKind_STRING_ARRAY ScalarArrayKind = 12 + FieldKind_NILLABLE_BLOB ScalarKind = 13 + FieldKind_NILLABLE_JSON ScalarKind = 14 + _ ScalarKind = 15 // safe to repurpose (was never used) + _ ScalarKind = 16 // Deprecated 2024-03-15, was FieldKind_FOREIGN_OBJECT + _ ScalarKind = 17 // Deprecated 2024-03-15, was FieldKind_FOREIGN_OBJECT_ARRAY + FieldKind_NILLABLE_BOOL_ARRAY ScalarArrayKind = 18 + FieldKind_NILLABLE_INT_ARRAY ScalarArrayKind = 19 + FieldKind_NILLABLE_FLOAT_ARRAY ScalarArrayKind = 20 + FieldKind_NILLABLE_STRING_ARRAY ScalarArrayKind = 21 ) // FieldKindStringToEnumMapping maps string representations of [FieldKind] values to @@ -169,3 +280,74 @@ var FieldKindStringToEnumMapping = map[string]FieldKind{ func (f SchemaFieldDescription) IsRelation() bool { return f.RelationName != "" } + +// schemaFieldDescription is a private type used to facilitate the unmarshalling +// of json to a [SchemaFieldDescription]. +type schemaFieldDescription struct { + Name string + RelationName string + Typ CType + IsPrimaryRelation bool + + // Properties below this line are unmarshalled using custom logic in [UnmarshalJSON] + Kind json.RawMessage +} + +func (f *SchemaFieldDescription) UnmarshalJSON(bytes []byte) error { + var descMap schemaFieldDescription + err := json.Unmarshal(bytes, &descMap) + if err != nil { + return err + } + + f.Name = descMap.Name + f.RelationName = descMap.RelationName + f.Typ = descMap.Typ + f.IsPrimaryRelation = descMap.IsPrimaryRelation + + if len(descMap.Kind) == 0 { + f.Kind = FieldKind_None + return nil + } + + if descMap.Kind[0] != '"' { + // If the Kind is not represented by a string, try to parse it as an int, as + // that is the only other type we support.
+ var intKind uint8 + err := json.Unmarshal(descMap.Kind, &intKind) + if err != nil { + return err + } + switch intKind { + case uint8(FieldKind_BOOL_ARRAY), uint8(FieldKind_INT_ARRAY), uint8(FieldKind_FLOAT_ARRAY), + uint8(FieldKind_STRING_ARRAY), uint8(FieldKind_NILLABLE_BOOL_ARRAY), uint8(FieldKind_NILLABLE_INT_ARRAY), + uint8(FieldKind_NILLABLE_FLOAT_ARRAY), uint8(FieldKind_NILLABLE_STRING_ARRAY): + f.Kind = ScalarArrayKind(intKind) + default: + f.Kind = ScalarKind(intKind) + } + } else { + var strKind string + err := json.Unmarshal(descMap.Kind, &strKind) + if err != nil { + return err + } + + kind, ok := FieldKindStringToEnumMapping[strKind] + if ok { + f.Kind = kind + } else { + // If we don't find the string representation of this type in the + // scalar mapping, assume it is an object - if it is not, validation + // will catch this later. If it is unknown we have no way of telling + // as to whether the user thought it was a scalar or an object anyway. + if strKind[0] == '[' { + f.Kind = ObjectArrayKind(strings.Trim(strKind, "[]")) + } else { + f.Kind = ObjectKind(strKind) + } + } + } + + return nil +} diff --git a/db/backup.go b/db/backup.go index d47b3534e1..81d62f78b9 100644 --- a/db/backup.go +++ b/db/backup.go @@ -72,7 +72,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin // check if self referencing and remove from docMap for key creation resetMap := map[string]any{} for _, field := range col.Schema().Fields { - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { if val, ok := docMap[field.Name+request.RelatedObjectID]; ok { if docMap[request.NewDocIDFieldName] == val { resetMap[field.Name+request.RelatedObjectID] = val @@ -214,9 +214,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName := "" // replace any foreign key if it needs to be changed for _, field := range col.Schema().Fields { - switch field.Kind { - case client.FieldKind_FOREIGN_OBJECT: - if _, ok := colNameCache[field.Schema]; !ok { + if field.Kind.IsObject() && !field.Kind.IsArray() { + if _, ok := colNameCache[field.Kind.Underlying()]; !ok { continue } if foreignKey, err := doc.Get(field.Name + request.RelatedObjectID); err == nil { @@ -230,9 +229,9 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName = field.Name + request.RelatedObjectID } } else { - foreignCol, err := db.getCollectionByName(ctx, txn, field.Schema) + foreignCol, err := db.getCollectionByName(ctx, txn, field.Kind.Underlying()) if err != nil { - return NewErrFailedToGetCollection(field.Schema, err) + return NewErrFailedToGetCollection(field.Kind.Underlying(), err) } foreignDocID, err := client.NewDocIDFromString(foreignKey.(string)) if err != nil { diff --git a/db/collection.go b/db/collection.go index 566722fca9..b1fc102943 100644 --- a/db/collection.go +++ b/db/collection.go @@ -201,7 +201,7 @@ func (db *db) updateSchema( } for _, field := range schema.Fields { - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { idFieldName := field.Name + "_id" if _, ok := schema.GetFieldByName(idFieldName); !ok { schema.Fields = append(schema.Fields, client.SchemaFieldDescription{ @@ -436,16 +436,11 @@ func validateUpdateSchemaFields( // If the field is new, then the collection has changed hasChanged = hasChanged || !fieldAlreadyExists - if !fieldAlreadyExists && (proposedField.Kind == client.FieldKind_FOREIGN_OBJECT || - 
proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY) { - if proposedField.Schema == "" { - return false, NewErrRelationalFieldMissingSchema(proposedField.Name, proposedField.Kind) - } - - relatedDesc, relatedDescFound := descriptionsByName[proposedField.Schema] + if !fieldAlreadyExists && proposedField.Kind.IsObject() { + relatedDesc, relatedDescFound := descriptionsByName[proposedField.Kind.Underlying()] if !relatedDescFound { - return false, NewErrSchemaNotFound(proposedField.Name, proposedField.Schema) + return false, NewErrFieldKindNotFound(proposedField.Name, proposedField.Kind.Underlying()) } if proposedField.RelationName == "" { @@ -453,12 +448,12 @@ func validateUpdateSchemaFields( } if proposedField.IsPrimaryRelation { - if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + if proposedField.Kind.IsObjectArray() { return false, NewErrPrimarySideOnMany(proposedField.Name) } } - if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT { + if proposedField.Kind.IsObject() && !proposedField.Kind.IsArray() { idFieldName := proposedField.Name + request.RelatedObjectID idField, idFieldFound := proposedDesc.GetFieldByName(idFieldName) if idFieldFound { @@ -485,7 +480,7 @@ func validateUpdateSchemaFields( } if !relatedFieldFound { - return false, client.NewErrRelationOneSided(proposedField.Name, proposedField.Schema) + return false, client.NewErrRelationOneSided(proposedField.Name, proposedField.Kind.Underlying()) } if !(proposedField.IsPrimaryRelation || relatedField.IsPrimaryRelation) { @@ -1539,11 +1534,11 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( if !ok { return client.NewErrFieldNotExist(strings.TrimSuffix(fieldDescription.Name, request.RelatedObjectID)) } - if objFieldDescription.Kind != client.FieldKind_FOREIGN_OBJECT { + if !(objFieldDescription.Kind.IsObject() && !objFieldDescription.Kind.IsArray()) { return nil } - otherCol, err := c.db.getCollectionByName(ctx, txn, objFieldDescription.Schema) + otherCol, err := c.db.getCollectionByName(ctx, txn, objFieldDescription.Kind.Underlying()) if err != nil { return err } @@ -1554,7 +1549,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( objFieldDescription.Name, &otherSchema, ) - if otherObjFieldDescription.Kind != client.FieldKind_FOREIGN_OBJECT { + if !(otherObjFieldDescription.Kind.IsObject() && !otherObjFieldDescription.Kind.IsArray()) { // If the other field is not an object field then this is not a one to one relation and we can continue return nil } diff --git a/db/collection_update.go b/db/collection_update.go index 7ddd868e47..496d8bf81c 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -321,7 +321,7 @@ func (c *collection) patchPrimaryDoc( return err } - primaryCol, err := c.db.getCollectionByName(ctx, txn, relationFieldDescription.Schema) + primaryCol, err := c.db.getCollectionByName(ctx, txn, relationFieldDescription.Kind.Underlying()) if err != nil { return err } diff --git a/db/errors.go b/db/errors.go index a41e396a8b..bda5154f79 100644 --- a/db/errors.go +++ b/db/errors.go @@ -28,7 +28,6 @@ const ( errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" errCannotModifySchemaName string = "modifying the schema name is not supported" errCannotSetVersionID string = "setting the VersionID is not supported" - errRelationalFieldMissingSchema string = "a schema name must be provided when adding a new relation field" errRelationalFieldInvalidRelationType string = "invalid RelationType" errRelationalFieldMissingIDField string = "missing id 
field for relation object field" errRelationalFieldMissingRelationName string = "missing relation name" @@ -44,7 +43,6 @@ const ( errCannotDeleteField string = "deleting an existing field is not supported" errFieldKindNotFound string = "no type found for given name" errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" - errSchemaNotFound string = "no schema found for given name" errDocumentAlreadyExists string = "a document with the given ID already exists" errDocumentDeleted string = "a document with the given ID has been deleted" errIndexMissingFields string = "index missing fields" @@ -258,14 +256,6 @@ func NewErrCannotModifySchemaName(existingName, proposedName string) error { ) } -func NewErrRelationalFieldMissingSchema(name string, kind client.FieldKind) error { - return errors.New( - errRelationalFieldMissingSchema, - errors.NewKV("Field", name), - errors.NewKV("Kind", kind), - ) -} - func NewErrRelationalFieldMissingIDField(name string, expectedName string) error { return errors.New( errRelationalFieldMissingIDField, @@ -320,9 +310,10 @@ func NewErrRelationalFieldIDInvalidType(name string, expected, actual client.Fie ) } -func NewErrFieldKindNotFound(kind string) error { +func NewErrFieldKindNotFound(name string, kind string) error { return errors.New( errFieldKindNotFound, + errors.NewKV("Field", name), errors.NewKV("Kind", kind), ) } @@ -335,14 +326,6 @@ func NewErrFieldKindDoesNotMatchFieldSchema(kind string, schema string) error { ) } -func NewErrSchemaNotFound(name string, schema string) error { - return errors.New( - errSchemaNotFound, - errors.NewKV("Field", name), - errors.NewKV("Schema", schema), - ) -} - func NewErrDuplicateField(name string) error { return errors.New(errDuplicateField, errors.NewKV("Name", name)) } diff --git a/db/index.go b/db/index.go index 319cdeb8a7..5de38aac44 100644 --- a/db/index.go +++ b/db/index.go @@ -36,8 +36,12 @@ func canConvertIndexFieldValue[T any](val any) bool { } func getValidateIndexFieldFunc(kind client.FieldKind) func(any) bool { + if kind.IsObject() && !kind.IsArray() { + return canConvertIndexFieldValue[string] + } + switch kind { - case client.FieldKind_NILLABLE_STRING, client.FieldKind_FOREIGN_OBJECT: + case client.FieldKind_NILLABLE_STRING: return canConvertIndexFieldValue[string] case client.FieldKind_NILLABLE_INT: return canConvertIndexFieldValue[int64] diff --git a/db/schema.go b/db/schema.go index a4582158f3..676d983983 100644 --- a/db/schema.go +++ b/db/schema.go @@ -170,10 +170,10 @@ func substituteSchemaPatch( return nil, err } - path = strings.TrimPrefix(path, "/") - splitPath := strings.Split(path, "/") - if value, hasValue := patchOperation["value"]; hasValue { + path = strings.TrimPrefix(path, "/") + splitPath := strings.Split(path, "/") + var newPatchValue immutable.Option[any] var field map[string]any isField := isField(splitPath) @@ -223,40 +223,6 @@ func substituteSchemaPatch( } } - if isField { - if kind, isString := field["Kind"].(string); isString { - substitute, schemaName, err := getSubstituteFieldKind(kind, schemaByName) - if err != nil { - return nil, err - } - - field["Kind"] = substitute - if schemaName != "" { - if field["Schema"] != nil && field["Schema"] != schemaName { - return nil, NewErrFieldKindDoesNotMatchFieldSchema(kind, field["Schema"].(string)) - } - field["Schema"] = schemaName - } - - newPatchValue = immutable.Some[any](field) - } - } else if isFieldKind(splitPath) { - var kind any - err = json.Unmarshal(*value, &kind) - if err != nil { - return nil, err - } 
- - if kind, isString := kind.(string); isString { - substitute, _, err := getSubstituteFieldKind(kind, schemaByName) - if err != nil { - return nil, err - } - - newPatchValue = immutable.Some[any](substitute) - } - } - if newPatchValue.HasValue() { substitute, err := json.Marshal(newPatchValue.Value()) if err != nil { @@ -331,36 +297,6 @@ func (db *db) getSchemas( return result, nil } -// getSubstituteFieldKind checks and attempts to get the underlying integer value for the given string -// Field Kind value. It will return the value if one is found, else returns an [ErrFieldKindNotFound]. -// -// If the value represents a foreign relation the collection name will also be returned. -func getSubstituteFieldKind( - kind string, - schemaByName map[string]client.SchemaDescription, -) (client.FieldKind, string, error) { - substitute, substituteFound := client.FieldKindStringToEnumMapping[kind] - if substituteFound { - return substitute, "", nil - } else { - var collectionName string - var substitute client.FieldKind - if len(kind) > 0 && kind[0] == '[' && kind[len(kind)-1] == ']' { - collectionName = kind[1 : len(kind)-1] - substitute = client.FieldKind_FOREIGN_OBJECT_ARRAY - } else { - collectionName = kind - substitute = client.FieldKind_FOREIGN_OBJECT - } - - if _, substituteFound := schemaByName[collectionName]; substituteFound { - return substitute, collectionName, nil - } - - return 0, "", NewErrFieldKindNotFound(kind) - } -} - // isFieldOrInner returns true if the given path points to a SchemaFieldDescription or a property within it. func isFieldOrInner(path []string) bool { //nolint:goconst @@ -372,13 +308,6 @@ func isField(path []string) bool { return len(path) == 3 && path[fieldsPathIndex] == "Fields" } -// isField returns true if the given path points to a SchemaFieldDescription.Kind property. -func isFieldKind(path []string) bool { - return len(path) == 4 && - path[fieldIndexPathIndex+1] == "Kind" && - path[fieldsPathIndex] == "Fields" -} - // containsLetter returns true if the string contains a single unicode character. func containsLetter(s string) bool { for _, r := range s { diff --git a/docs/data_format_changes/i2409-unify-field-kind-and-schema.md b/docs/data_format_changes/i2409-unify-field-kind-and-schema.md new file mode 100644 index 0000000000..edda3e1e3e --- /dev/null +++ b/docs/data_format_changes/i2409-unify-field-kind-and-schema.md @@ -0,0 +1,3 @@ +# Unify Field Kind and Schema properties + +The client SchemaFieldDescription Kind and Schema properties have been unified, and FieldKind values 16 and 17 (foreign objects) have been replaced. diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 953d21ce17..b48fbb6f9c 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -733,7 +733,7 @@ func getCollectionName( if parentHasField && hostFieldDesc.RelationName != "" { // If this field exists on the parent, and it is a child object // then this collection name is the collection name of the child. 
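// A minimal sketch of the unified kind API introduced by this patch, assuming
// only the client package names visible in the hunks above; the relation's
// target collection now travels with the field kind itself instead of the
// removed SchemaFieldDescription.Schema property:
//
//	one := client.ObjectKind("Author")      // single object side of a relation
//	one.IsObject()                          // true
//	one.IsObjectArray()                     // false
//	one.Underlying()                        // "Author", the related collection name
//
//	many := client.ObjectArrayKind("Book")  // array side of a relation
//	many.IsObjectArray()                    // true
//	many.Underlying()                       // "Book"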
- return hostFieldDesc.Schema, nil + return hostFieldDesc.Kind.Underlying(), nil } } diff --git a/planner/type_join.go b/planner/type_join.go index 915a2d128f..dd2fae8e77 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -85,9 +85,9 @@ func (p *Planner) makeTypeIndexJoin( return nil, client.NewErrFieldNotExist(subType.Name) } - if typeFieldDesc.Kind == client.FieldKind_FOREIGN_OBJECT { // One-to-One, or One side of One-to-Many + if typeFieldDesc.Kind.IsObject() && !typeFieldDesc.Kind.IsArray() { // One-to-One, or One side of One-to-Many joinPlan, err = p.makeTypeJoinOne(parent, source, subType) - } else if typeFieldDesc.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { // Many side of One-to-Many + } else if typeFieldDesc.Kind.IsObjectArray() { // Many side of One-to-Many joinPlan, err = p.makeTypeJoinMany(parent, source, subType) } else { // more to come, Many-to-Many, Embedded? return nil, ErrUnknownRelationType diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index d9ebefa680..3786d41209 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -327,14 +327,14 @@ func fieldsFromAST(field *ast.FieldDefinition, fieldDescriptions := []client.SchemaFieldDescription{} - if kind == client.FieldKind_FOREIGN_OBJECT || kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - if kind == client.FieldKind_FOREIGN_OBJECT { + if kind.IsObject() { + if !kind.IsArray() { schema = field.Type.(*ast.Named).Name.Value relationType = relation_Type_ONE if _, exists := findDirective(field, "primary"); exists { relationType |= relation_Type_Primary } - } else if kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + } else { schema = field.Type.(*ast.List).Type.(*ast.Named).Name.Value relationType = relation_Type_MANY } @@ -344,7 +344,7 @@ func fieldsFromAST(field *ast.FieldDefinition, return nil, err } - if kind == client.FieldKind_FOREIGN_OBJECT { + if !kind.IsArray() { // An _id field is added for every 1-N relationship from this object. 
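// As a sketch (mirroring the test expectations later in this patch), a
// one-to-one field such as `author: Author` on Book produces, alongside the
// object field itself, a companion scalar field:
//
//	client.SchemaFieldDescription{
//		Name:         "author_id",
//		Kind:         client.FieldKind_DocID,
//		RelationName: "author_book",
//	}
//
// letting the document store the related DocID directly.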
fieldDescriptions = append(fieldDescriptions, client.SchemaFieldDescription{ Name: fmt.Sprintf("%s_id", field.Name.Value), @@ -376,7 +376,6 @@ func fieldsFromAST(field *ast.FieldDefinition, Name: field.Name.Value, Kind: kind, Typ: cType, - Schema: schema, RelationName: relationName, } @@ -401,6 +400,15 @@ func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CTyp } } } + + if kind.IsObjectArray() { + return client.NONE_CRDT, nil + } + + if kind.IsObject() { + return client.LWW_REGISTER, nil + } + return defaultCRDTForFieldKind[kind], nil } @@ -430,7 +438,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeString: return client.FieldKind_STRING_ARRAY, nil default: - return 0, NewErrNonNullForTypeNotSupported(innerAstTypeVal.Type.(*ast.Named).Name.Value) + return client.FieldKind_None, NewErrNonNullForTypeNotSupported(innerAstTypeVal.Type.(*ast.Named).Name.Value) } default: @@ -444,7 +452,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeString: return client.FieldKind_NILLABLE_STRING_ARRAY, nil default: - return client.FieldKind_FOREIGN_OBJECT_ARRAY, nil + return client.ObjectArrayKind(astTypeVal.Type.(*ast.Named).Name.Value), nil } } @@ -467,14 +475,14 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeJSON: return client.FieldKind_NILLABLE_JSON, nil default: - return client.FieldKind_FOREIGN_OBJECT, nil + return client.ObjectKind(astTypeVal.Name.Value), nil } case *ast.NonNull: - return 0, ErrNonNullNotSupported + return client.FieldKind_None, ErrNonNullNotSupported default: - return 0, NewErrTypeNotFound(t.String()) + return client.FieldKind_None, NewErrTypeNotFound(t.String()) } } @@ -532,20 +540,20 @@ func finalizeRelations(relationManager *RelationManager, definitions []client.Co return err } - _, fieldRelationType, ok := rel.getField(field.Schema, field.Name) + _, fieldRelationType, ok := rel.getField(field.Kind.Underlying(), field.Name) if !ok { - return NewErrRelationMissingField(field.Schema, field.Name) + return NewErrRelationMissingField(field.Kind.Underlying(), field.Name) } // if not finalized then we are missing one side of the relationship // unless this is an embedded object, which only have single-sided relations - _, shouldBeOneSidedRelation := embeddedObjNames[field.Schema] + _, shouldBeOneSidedRelation := embeddedObjNames[field.Kind.Underlying()] if shouldBeOneSidedRelation && rel.finalized { - return NewErrViewRelationMustBeOneSided(field.Name, field.Schema) + return NewErrViewRelationMustBeOneSided(field.Name, field.Kind.Underlying()) } if !shouldBeOneSidedRelation && !rel.finalized { - return client.NewErrRelationOneSided(field.Name, field.Schema) + return client.NewErrRelationOneSided(field.Name, field.Kind.Underlying()) } field.IsPrimaryRelation = fieldRelationType.isSet(relation_Type_Primary) diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index cb19140d26..dc97705b5d 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -18,28 +18,6 @@ import ( ) var ( - // this is only here as a reference, and not to be used - // directly. 
As it will yield incorrect and unexpected - // results - - //nolint:unused - gqlTypeToFieldKindReference = map[gql.Type]client.FieldKind{ - gql.ID: client.FieldKind_DocID, - gql.Boolean: client.FieldKind_NILLABLE_BOOL, - gql.Int: client.FieldKind_NILLABLE_INT, - gql.Float: client.FieldKind_NILLABLE_FLOAT, - gql.DateTime: client.FieldKind_NILLABLE_DATETIME, - gql.String: client.FieldKind_NILLABLE_STRING, - &gql.Object{}: client.FieldKind_FOREIGN_OBJECT, - &gql.List{}: client.FieldKind_FOREIGN_OBJECT_ARRAY, - // Custom scalars - schemaTypes.BlobScalarType: client.FieldKind_NILLABLE_BLOB, - schemaTypes.JSONScalarType: client.FieldKind_NILLABLE_JSON, - // More custom ones to come - // - JSON - // - Counters - } - fieldKindToGQLType = map[client.FieldKind]gql.Type{ client.FieldKind_DocID: gql.ID, client.FieldKind_NILLABLE_BOOL: gql.Boolean, @@ -59,7 +37,6 @@ var ( client.FieldKind_NILLABLE_JSON: schemaTypes.JSONScalarType, } - // This map is fine to use defaultCRDTForFieldKind = map[client.FieldKind]client.CType{ client.FieldKind_DocID: client.LWW_REGISTER, client.FieldKind_NILLABLE_BOOL: client.LWW_REGISTER, @@ -77,8 +54,6 @@ var ( client.FieldKind_NILLABLE_STRING_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_BLOB: client.LWW_REGISTER, client.FieldKind_NILLABLE_JSON: client.LWW_REGISTER, - client.FieldKind_FOREIGN_OBJECT: client.LWW_REGISTER, - client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT, } ) diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 93f6b36d48..1540037a8d 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -177,9 +177,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "author", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Author"), Typ: client.NONE_CRDT, - Schema: "Author", }, { Name: "author_id", @@ -225,9 +224,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "published", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Book"), Typ: client.NONE_CRDT, - Schema: "Book", IsPrimaryRelation: true, }, { @@ -352,9 +350,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "author", RelationName: "book_authors", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Author"), Typ: client.NONE_CRDT, - Schema: "Author", }, { Name: "author_id", @@ -400,9 +397,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "published", RelationName: "book_authors", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Book"), Typ: client.NONE_CRDT, - Schema: "Book", IsPrimaryRelation: true, }, { @@ -447,9 +443,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "author", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Author"), Typ: client.NONE_CRDT, - Schema: "Author", IsPrimaryRelation: true, }, { @@ -496,9 +491,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "published", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Book"), Typ: client.NONE_CRDT, - Schema: "Book", }, { Name: "published_id", @@ -542,9 +536,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: "author", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT, + Kind: client.ObjectKind("Author"), Typ: client.NONE_CRDT, - Schema: "Author", IsPrimaryRelation: true, }, { @@ -591,9 +584,8 @@ func TestSingleSimpleType(t *testing.T) { { Name: 
"published", RelationName: "author_book", - Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, + Kind: client.ObjectArrayKind("Book"), Typ: client.NONE_CRDT, - Schema: "Book", }, }, }, diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index e4397e2e40..fc706041d8 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -460,16 +460,16 @@ func (g *Generator) buildTypes( } var ttype gql.Type - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { var ok bool - ttype, ok = g.manager.schema.TypeMap()[field.Schema] + ttype, ok = g.manager.schema.TypeMap()[field.Kind.Underlying()] if !ok { - return nil, NewErrTypeNotFound(field.Schema) + return nil, NewErrTypeNotFound(field.Kind.Underlying()) } - } else if field.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { - t, ok := g.manager.schema.TypeMap()[field.Schema] + } else if field.Kind.IsObjectArray() { + t, ok := g.manager.schema.TypeMap()[field.Kind.Underlying()] if !ok { - return nil, NewErrTypeNotFound(field.Schema) + return nil, NewErrTypeNotFound(field.Kind.Underlying()) } ttype = gql.NewList(t) } else { @@ -566,9 +566,9 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin } var ttype gql.Type - if field.Kind == client.FieldKind_FOREIGN_OBJECT { + if field.Kind.IsObject() && !field.Kind.IsArray() { ttype = gql.ID - } else if field.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + } else if field.Kind.IsObjectArray() { ttype = gql.NewList(gql.ID) } else { var ok bool diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index c837b822a9..9b4acc440d 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -121,7 +121,7 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { // getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.SchemaFieldDescription) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) - return g.generatedDocs[field.Schema][ind].docID + return g.generatedDocs[field.Kind.Underlying()][ind].docID } func (g *randomDocGenerator) generateRandomDocs(order []string) error { @@ -226,10 +226,7 @@ func validateDefinitions(definitions []client.CollectionDefinition) error { return NewErrIncompleteColDefinition("field name is empty") } if field.Kind.IsObject() { - if field.Schema == "" { - return NewErrIncompleteColDefinition("field schema is empty") - } - fieldRefs = append(fieldRefs, field.Schema) + fieldRefs = append(fieldRefs, field.Kind.Underlying()) } } colNames[def.Description.Name.Value()] = struct{}{} diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index b4746ae437..7a17d74989 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -68,7 +68,7 @@ func (c *typeUsageCounters) addRelationUsage( field client.SchemaFieldDescription, minPerDoc, maxPerDoc, numDocs int, ) { - primaryType := field.Schema + primaryType := field.Kind.Underlying() if _, ok := c.m[primaryType]; !ok { c.m[primaryType] = make(map[string]map[string]*relationUsage) } @@ -82,7 +82,7 @@ func (c *typeUsageCounters) addRelationUsage( // getNextTypeIndForField returns the next index to be used for a foreign field. 
func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.SchemaFieldDescription) int { - current := c.m[field.Schema][secondaryType][field.Name] + current := c.m[field.Kind.Underlying()][secondaryType][field.Name] return current.useNextDocIDIndex() } @@ -273,7 +273,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( ) (typeDemand, error) { primaryTypeDef := g.types[primaryType] for _, field := range primaryTypeDef.Schema.Fields { - if field.Kind.IsObject() && field.Schema == secondaryType { + if field.Kind.IsObject() && field.Kind.Underlying() == secondaryType { primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max} minPerDoc, maxPerDoc := 1, 1 @@ -312,7 +312,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( return typeDemand{}, NewErrCanNotSupplyTypeDemand(primaryType) } g.docsDemand[primaryType] = primaryDemand - g.initRelationUsages(field.Schema, primaryType, minPerDoc, maxPerDoc) + g.initRelationUsages(field.Kind.Underlying(), primaryType, minPerDoc, maxPerDoc) } } return secondaryDemand, nil @@ -344,7 +344,7 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max} minPerDoc, maxPerDoc := 1, 1 - curSecDemand, hasSecDemand := g.docsDemand[field.Schema] + curSecDemand, hasSecDemand := g.docsDemand[field.Kind.Underlying()] if field.Kind.IsArray() { fieldConf := g.config.ForField(typeName, field.Name) @@ -368,21 +368,26 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( if hasSecDemand { if curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max { - return NewErrCanNotSupplyTypeDemand(field.Schema) + return NewErrCanNotSupplyTypeDemand(field.Kind.Underlying()) } } else { - g.docsDemand[field.Schema] = newSecDemand + g.docsDemand[field.Kind.Underlying()] = newSecDemand } - g.initRelationUsages(field.Schema, typeName, minPerDoc, maxPerDoc) + g.initRelationUsages(field.Kind.Underlying(), typeName, minPerDoc, maxPerDoc) - err := g.calculateDemandForSecondaryTypes(field.Schema, primaryGraph) + err := g.calculateDemandForSecondaryTypes(field.Kind.Underlying(), primaryGraph) if err != nil { return err } - for _, primaryTypeName := range primaryGraph[field.Schema] { + for _, primaryTypeName := range primaryGraph[field.Kind.Underlying()] { if _, ok := g.docsDemand[primaryTypeName]; !ok { - primaryDemand, err := g.getDemandForPrimaryType(primaryTypeName, field.Schema, newSecDemand, primaryGraph) + primaryDemand, err := g.getDemandForPrimaryType( + primaryTypeName, + field.Kind.Underlying(), + newSecDemand, + primaryGraph, + ) if err != nil { return err } @@ -397,7 +402,7 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, minPerDoc, maxPerDoc int) { secondaryTypeDef := g.types[secondaryType] for _, secondaryTypeField := range secondaryTypeDef.Schema.Fields { - if secondaryTypeField.Schema == primaryType { + if secondaryTypeField.Kind.Underlying() == primaryType { g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, minPerDoc, maxPerDoc, g.docsDemand[primaryType].getAverage()) } @@ -420,9 +425,9 @@ func getRelationGraph(types map[string]client.CollectionDefinition) map[string][ for _, field := range typeDef.Schema.Fields { if field.Kind.IsObject() { if field.IsPrimaryRelation { - primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Schema) + primaryGraph[typeName] = 
appendUnique(primaryGraph[typeName], field.Kind.Underlying()) } else { - primaryGraph[field.Schema] = appendUnique(primaryGraph[field.Schema], typeName) + primaryGraph[field.Kind.Underlying()] = appendUnique(primaryGraph[field.Kind.Underlying()], typeName) } } } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 0ddca543f2..52ee7eb58d 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -1212,9 +1212,8 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Kind: client.FieldKind_NILLABLE_INT, }, { - Name: "device", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "Device", + Name: "device", + Kind: client.ObjectKind("Device"), }, }, }, @@ -1233,8 +1232,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. }, { Name: "owner", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "User", + Kind: client.ObjectKind("User"), IsPrimaryRelation: true, }, }, @@ -1283,18 +1281,6 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. defs[1].Description.ID = 0 }, }, - { - name: "relation field is missing schema name", - changeDefs: func(defs []client.CollectionDefinition) { - defs[1].Schema.Fields[1].Schema = "" - }, - }, - { - name: "relation field references unknown schema", - changeDefs: func(defs []client.CollectionDefinition) { - defs[1].Schema.Fields[1].Schema = "Unknown" - }, - }, } for _, tc := range testCases { @@ -1336,8 +1322,7 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { }, { Name: "devices", - Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, - Schema: "Device", + Kind: client.ObjectArrayKind("Device"), RelationName: "Device_owner", }, }, @@ -1359,7 +1344,6 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Name: "owner_id", Kind: client.FieldKind_DocID, RelationName: "Device_owner", - Schema: "User", }, }, }, diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 2f0960b977..723421f91b 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -66,14 +66,14 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ExpectedUpdates: []testUtils.ExpectedUpdate{ { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4"), + Cid: immutable.Some("bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste"), }, { DocID: immutable.Some(docID2), }, { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeiah75qvtqxflw3urgejxetaugpcddx5h2ocj7pid34zjyy7tpp6wi"), + Cid: immutable.Some("bafybeidpwcpixokptqamh7qvngbrm335mvrzs3skrlwdmkq6nmqesoj4sm"), }, }, } diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 958dc113f1..b500ce1daf 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + "cid": "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", }, }, }, diff --git a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go index b5990a050f..8fd73fe06a 100644 --- 
a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go +++ b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go @@ -76,7 +76,7 @@ func TestP2PSubscribeAddGetMultiple(t *testing.T) { }, testUtils.GetAllP2PCollections{ NodeID: 1, - ExpectedCollectionIDs: []int{0, 2}, + ExpectedCollectionIDs: []int{2, 0}, }, }, } diff --git a/tests/integration/net/state/simple/replicator/with_create_test.go b/tests/integration/net/state/simple/replicator/with_create_test.go index 3cec12b351..9fee99880a 100644 --- a/tests/integration/net/state/simple/replicator/with_create_test.go +++ b/tests/integration/net/state/simple/replicator/with_create_test.go @@ -492,7 +492,7 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", + "schemaVersionId": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", }, }, }, @@ -552,7 +552,7 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", + "schemaVersionId": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", }, }, }, diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index 4239e7cfd6..7297f7fa4a 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibhain2764v7eltfiam6dgwivfj56mvbme34nbdsdbndrsjkc2cje", + "cid": "bafybeiev2thtqxttuhr3aq5dvyb3aif4cey7werksskw5xuetmwxjxi7ty", }, { - "cid": "bafybeickrd5xayjhedyypf3yus55bkhpwd5dqlkdhivrcceexkpsgnic24", + "cid": "bafybeifos5iir63tmp3bdoj7zr5aand4ud2tf2qfnjlh6nvrzw3knkewuy", }, { - "cid": "bafybeieqyyprwrkbgyn7x4jkzmlnupnzpdymvbulef37brkzn7blqbe6l4", + "cid": "bafybeicjb4x47xk6koh4uhgokhjr5zbg3bhbcfoa4um4vdktnbhrsx6d2a", }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -125,16 +125,16 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "schemaVersionId": 
"bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", }, }, }, @@ -349,7 +349,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { `, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "collectionID": int64(1), "delta": testUtils.CBORValue(22), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -358,13 +358,13 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "_head", }, }, }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "collectionID": int64(1), "delta": testUtils.CBORValue(21), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -374,7 +374,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "collectionID": int64(1), "delta": testUtils.CBORValue("John"), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -384,7 +384,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -393,17 +393,17 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "name": "_head", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "name": "age", }, }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -412,11 +412,11 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": 
"bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index 4878ea8f9a..30eab52b47 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi" + cid: "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi" + cid: "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index cdda45101c..e31a18c9c7 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { // "Age" field head - "cid": "bafybeid4y4vqmvec2mvm3su77rrmj6tzsx5zdlt6ias4hzqxbevmosydc4", + "cid": 
"bafybeid44afmsi6hh6yasgcjncnlvdpqsu2durizsxhdmhsbrqekypf6aa", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeifysgo74dhzl2t74j5qh32t5uufar7otua6ggvsapjbxpzimcbnoi", + "cid": "bafybeieepmzk3s5dzxztfq5zi5e5g3mnb6yfumx3euknnbur4x3a5neidq", }, { - "cid": "bafybeibqvujxi4tjtrwg5igvg6zdvjaxvkmb5h2msjbtta3lmytgs7hft4", + "cid": "bafybeifcxdrzqfj54w5mls7mf6nhxtjnweoevves7rwzsda6gmvzqc4t7y", }, { - "cid": "bafybeib7zmofgbtvxcb3gy3bfbwp3btqrmoacmxl4duqhwlvwu6pihzbeu", + "cid": "bafybeihavavtkfgaevtnzbabdwmgpamgbpkonw4ardalsamcitspqaxhs4", }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go index 434e8b27aa..abaaa4b434 100644 --- a/tests/integration/query/commits/with_doc_id_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -104,14 +104,14 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Request: ` { commits( docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a" + cid: "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go index 3cd01352ad..da28665990 100644 --- a/tests/integration/query/commits/with_doc_id_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "_count": 0, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "_count": 0, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_doc_id_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go index de672e8d70..790fa672a1 100644 --- a/tests/integration/query/commits/with_doc_id_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -118,7 +118,7 @@ func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, }, }, @@ -150,7 +150,7 @@ func 
TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go index d7981cc6ce..84be4f5682 100644 --- a/tests/integration/query/commits/with_doc_id_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -57,10 +57,10 @@ func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go index b31a3b848e..a84344a402 100644 --- a/tests/integration/query/commits/with_doc_id_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -50,10 +50,10 @@ func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 135418b8f2..6ccf0cca44 100644 --- a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -58,11 +58,11 @@ func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go index 10009bab11..02f4426958 100644 --- a/tests/integration/query/commits/with_doc_id_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -44,23 +44,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": 
"bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, }, @@ -99,23 +99,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, }, @@ -154,24 +154,24 @@ func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", - "height": int64(2), + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", - "height": int64(1), + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "height": int64(2), }, }, }, @@ -209,23 +209,23 @@ func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", - "height": int64(1), + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", - "height": int64(2), + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, }, @@ -278,39 +278,39 @@ func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "height": int64(1), }, { - "cid": 
"bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeic45wkhxtpn3vgd2dmmohq76vw56qz3cpu3oorha3hf2w6qu7bpoa", + "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", "height": int64(3), }, { - "cid": "bafybeid4y4vqmvec2mvm3su77rrmj6tzsx5zdlt6ias4hzqxbevmosydc4", + "cid": "bafybeid44afmsi6hh6yasgcjncnlvdpqsu2durizsxhdmhsbrqekypf6aa", "height": int64(3), }, { - "cid": "bafybeiatfviresatclvedt6zhk4ys7p6cdts5udqsl33nu5d2hxtw4l6la", + "cid": "bafybeifq2bd3nkqa6q5tjb5lrmeoskimtchhodvcxdqeilck2x4k3z7ijq", "height": int64(4), }, { - "cid": "bafybeiaydxxf7bmeh5ou47z6exa73heg6vjjzznbvrxqbemmu55sdhvuom", + "cid": "bafybeiakoro6m2bvfmtmczykyffvixx6ci7tbgczebhdwttx5ymh5c7wyy", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_doc_id_test.go b/tests/integration/query/commits/with_doc_id_test.go index a08f82f3a0..b57219df46 100644 --- a/tests/integration/query/commits/with_doc_id_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -62,13 +62,13 @@ func TestQueryCommitsWithDocID(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -102,22 +102,22 @@ func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "links": []map[string]any{}, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "name": "name", }, }, @@ -158,23 +158,23 @@ func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "height": int64(2), }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": 
"bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "height": int64(1), }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "height": int64(1), }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "height": int64(2), }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "height": int64(1), }, }, @@ -219,44 +219,44 @@ func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "_head", }, }, }, { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "links": []map[string]any{}, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "links": []map[string]any{}, }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", "links": []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "name": "_head", }, { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", "name": "age", }, }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_doc_id_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go index 09dcc4060f..17a1422d7b 100644 --- a/tests/integration/query/commits/with_doc_id_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "__typename": "Commit", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "__typename": "Commit", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index 01a2204326..bebd35b828 100644 --- 
a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, }, }, @@ -98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -131,8 +131,8 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. }`, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", }, }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index 362829ee0b..414df231c2 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeicwg56ddi7smy3j2kkv5y4yghvdrj3twqqafzdwtinbkw5mlpxwz4", + "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", }, { - "cid": "bafybeidxnkwhuzmkdw5wuippru3tp74vcmz5jvcziambpjadxeathdh26a", + "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "_group": []map[string]any{ { "height": int64(1), diff --git a/tests/integration/query/latest_commits/with_doc_id_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go index c1fce06eb6..4c7ed89f9c 100644 --- a/tests/integration/query/latest_commits/with_doc_id_field_test.go +++ 
b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "links": []map[string]any{}, }, }, @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go index 0c34c4dab2..0ef237c300 100644 --- a/tests/integration/query/latest_commits/with_doc_id_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocID(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", "links": []map[string]any{ { - "cid": "bafybeicvpgfinf2m2jufbbcy5mhv6jca6in5k4fzx5op7xvvcmbp7sceaa", + "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", "name": "age", }, { - "cid": "bafybeib2espk2hq366wjnmazg45uvoswqbvf4plx7fgzayagxdn737onci", + "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", "name": "name", }, }, @@ -75,8 +75,8 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeigvpf62j7j2wbpid5iavzxielbhbsbbirmgzqkw3wpptdvysuztwi", - "schemaVersionId": "bafkreidqkjb23ngp34eebeaxiogrlogkpfz62vjb3clnnyvhlbgdaywkg4", + "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", }, }, } diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index c9dd0ff4ba..843bf12638 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -104,7 +104,7 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii" + cid: "bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm" docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -179,7 +179,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii", + cid: "bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -252,7 +252,7 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidshqlc7z2psrtfhmrarsxwxwwis6baxjrzs2x6mdmzsop6b7hnii", + cid: 
"bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { rating @@ -324,7 +324,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeiefqhex3axofwy2gwdynhs6rijwrpkdpwy5fnqnzbk3e7iwcgvrqa", + cid: "bafybeicuhxlsrkonczjlrpj77xbg6fxgkncictecifxe7rdw4egxs72kse", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { rating diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index f19bc4e9fa..fee91f7399 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -93,7 +93,7 @@ func TestQuerySimpleWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -135,7 +135,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -177,7 +177,7 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeickytibhqnqtwhpjfi7ponnu5756ifo76oxb2ksxrz4iiqaywg3lu", + cid: "bafybeicowz6vraybays3br77rm4yzkiykr6jlp3mmsbyqbkcvk2cdukdru", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -224,7 +224,7 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeickytibhqnqtwhpjfi7ponnu5756ifo76oxb2ksxrz4iiqaywg3lu", + cid: "bafybeicowz6vraybays3br77rm4yzkiykr6jlp3mmsbyqbkcvk2cdukdru", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -266,7 +266,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) testUtils.Request{ Request: `query { Users ( - cid: "bafybeidzstxabh7qktq7pkmmxvpjbnwklxz3h5l6d425ldvjy65xvvuxu4", + cid: "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -280,7 +280,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + "schemaVersionId": "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", }, }, }, @@ -324,7 +324,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeiebqzqml6nn3laarr7yekakrsdnkn4nbgrl4xc5rshljp3in6au2m", + cid: "bafybeihd4uju62lpqft3fheevde2cmcehty3zqkbpyp2zu2ehfwietcu5i", docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" ) { name @@ -376,7 +376,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeifzuh74aq47vjngkwipjne4r2gi3v2clewgsruspqirihnps4vcmu", + cid: "bafybeiecgpblwcvgs3lw66v2p7frvwwak4gg4754dax742lomfxfrrvb4i", docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" ) { name diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 0f3866f910..08032dd694 
100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", "name": "Name", }, }, @@ -90,7 +90,7 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { "Name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", }, }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", "L1": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", "name": "Name", }, }, @@ -242,7 +242,7 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -251,15 +251,15 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", "name": "Name", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", }, }, }, @@ -321,7 +321,7 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeigcjabzlkuj4j35boczgcl4jmars7gz5a7dfvpq3m344bzth7ebqq", + "cid": "bafybeieywntwsejjuxxrwhlcudadsyc6xhy3pt6rcdhom3zvdewqhmncve", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -330,18 +330,18 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeihzra5nmcai4omdv2hkplrpexjsau62eaa2ndrf2b7ksxvl7hx3qm", + "cid": "bafybeibb6sup35cb4tjrgetjqkqshg3r56vk5up7ruz3rddqklttnk7yfi", "name": "Age", }, { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", 
"name": "_head", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", }, { - "cid": "bafybeicojqe66grk564b2hns3zi6rhquqvugxj6wi4s6xk4e2gg65dzx5e", + "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -350,15 +350,15 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeic45t5rj54wx47fhaqm6dubwt2cf5fkqzwm2nea7ypam3f6s2zbk4", + "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", "name": "Age", }, { - "cid": "bafybeifkcrogypyaq5iw7krgi5jd26s7jlfsy5u232e7e7y7dqe3wm2hcu", + "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", "name": "Name", }, }, - "schemaVersionId": "bafkreiekkppcdl573ru624wh3kwkmy2nhqzjsvqpu6jv5dgq2kidpnon4u", + "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", }, }, }, diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go index 073a8e4e83..47388262d0 100644 --- a/tests/integration/schema/crdt_type_test.go +++ b/tests/integration/schema/crdt_type_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { - schemaVersionID := "bafkreib2rcnzkjrwabw6kx7qnncfuylugukoosilmb2dct5qylmgec7fdu" + schemaVersionID := "bafkreihg7aweuwitzdtturuipps2rxw774o5iu36ovxqawdncxa4yibpsq" test := testUtils.TestCase{ Actions: []any{ @@ -59,7 +59,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { } func TestSchemaCreate_ContainsPNCounterTypeWithFloatKind_NoError(t *testing.T) { - schemaVersionID := "bafkreiddz4h2oqi3qzfeqfbjt3wpwrvtm62r4l6uche2nxyullmlmezrsq" + schemaVersionID := "bafkreig7olui76coe4nmm6s7f6lza7d7i35rurktxhcbmrs4po7plcrnvu" test := testUtils.TestCase{ Actions: []any{ diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index f809b58627..9f7d3bea3c 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -71,9 +71,9 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { } func TestGetSchema_ReturnsAllSchema(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" - booksSchemaVersion1ID := "bafkreiakx6sdz3govsorfppdv2pru4fgjzt2qljgjhpkxnkyr7kl4vhdme" + usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" + usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" + booksSchemaVersion1ID := "bafkreicwmtpmea4gis6lkt46l5evd2xhais36qd5egb2b7mjrqnojbtzja" test := testUtils.TestCase{ Actions: []any{ @@ -98,9 +98,9 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { testUtils.GetSchema{ ExpectedResults: []client.SchemaDescription{ { - Name: "Books", - Root: booksSchemaVersion1ID, - VersionID: booksSchemaVersion1ID, + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", @@ -126,9 +126,9 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { }, }, { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + Name: "Books", + Root: booksSchemaVersion1ID, 
+ VersionID: booksSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", @@ -145,8 +145,8 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" + usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" + usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" test := testUtils.TestCase{ Actions: []any{ @@ -174,28 +174,28 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion2ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, }, }, { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + VersionID: usersSchemaVersion2ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + { + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, }, }, }, @@ -208,8 +208,8 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { - usersSchemaVersion1ID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" - usersSchemaVersion2ID := "bafkreidic23paxtc5sannovwkpp6kmpg7xufufz4dgxjsiq2exk2wieh4a" + usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" + usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" test := testUtils.TestCase{ Actions: []any{ @@ -237,28 +237,28 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion2ID, + VersionID: usersSchemaVersion1ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, }, }, { Name: "Users", Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, + VersionID: usersSchemaVersion2ID, Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + { + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, }, }, }, diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index c80b1386dd..150e70a0a4 100644 --- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQuery(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -115,8 +115,8 @@ func 
TestSchemaMigrationQueryMultipleDocs(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -178,8 +178,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,8 +254,8 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // Register a migration from schema version 1 to schema version 2 **only** - // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,8 +325,8 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -394,8 +394,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,8 +411,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -466,8 +466,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -483,8 +483,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -553,8 +553,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd testUtils.ConfigureMigration{ // Declare the migration from v2=>v3 before declaring the migration from v1=>v2 LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -570,8 +570,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - 
SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -712,8 +712,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -773,8 +773,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiasjk4ypvsmdiebxadvhdnpvq4eun6wielebzlcnipyqr357bz7ou", - DestinationSchemaVersionID: "bafkreie7zotytkhmsp7ro5dqyf75fwrafos4xowgatalicbcb3lu5lfade", + SourceSchemaVersionID: "bafkreicm3axeowuuorrvlpvzatvnsaa6224qt7erlzjjhevwkndn532pxe", + DestinationSchemaVersionID: "bafkreih4urgndwhrvjoruj55yv5n3luvvky4daq67ivahiici7yn35mkfu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -836,8 +836,8 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreibqzsrn3acwn7hkakm2ko5i4t5pdarmylvodi5tnpxunfcwmut2ua", + SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + DestinationSchemaVersionID: "bafkreig5ovmx3vbhskpazxzjvlezy4brrndxu7bhdn5z2iqnozvw5iliwu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -897,8 +897,8 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreibqzsrn3acwn7hkakm2ko5i4t5pdarmylvodi5tnpxunfcwmut2ua", + SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + DestinationSchemaVersionID: "bafkreig5ovmx3vbhskpazxzjvlezy4brrndxu7bhdn5z2iqnozvw5iliwu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -971,8 +971,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - DestinationSchemaVersionID: "bafkreicf3nvrorgv2v6czh2lkakibv4me2il5xxytqxfyof7jlmkkdkle4", + SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + DestinationSchemaVersionID: "bafkreihmw2xtrfccga6dy2nsh2sqwnzmbsygm5xkoltf4v3u4vdrinliki", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -1033,8 +1033,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiewca6o66mgkpbai2vtrupolvtf66wllbvouvtwo6fkc6alrybzfa", - 
DestinationSchemaVersionID: "bafkreicf3nvrorgv2v6czh2lkakibv4me2il5xxytqxfyof7jlmkkdkle4", + SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + DestinationSchemaVersionID: "bafkreihmw2xtrfccga6dy2nsh2sqwnzmbsygm5xkoltf4v3u4vdrinliki", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_doc_id_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go index 3acb7ab890..70bf0040e3 100644 --- a/tests/integration/schema/migrations/query/with_doc_id_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -52,8 +52,8 @@ func TestSchemaMigrationQueryByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -158,8 +158,8 @@ func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index 2b22fba89d..4e9bee6828 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", + DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -145,8 +145,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", + DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -163,8 +163,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", - DestinationSchemaVersionID: "bafkreidtw4d7bv57wmwwwxkejburwuktc2kiakkmzgiacyy5vl7gj2ih5i", + SourceSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + DestinationSchemaVersionID: "bafkreihxxnewvatrejbay6uwon5pcxxh2427txtq3ozwc5qybc2hwyn4s4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -253,8 +253,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu", - DestinationSchemaVersionID: "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha", + SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", + DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -355,8 +355,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", - DestinationSchemaVersionID: "bafkreicyyn7ourjvr2o6bqa57z2bl5wz5u2ykdlmd5v7n53cw7l6xsdplm", + SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index 196b5cf57e..7d7525b910 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -99,8 +99,8 @@ func TestSchemaMigrationQueryWithRestartAndMigrationBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index d18f2f4092..8501d68a8e 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( 
) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" test := testUtils.TestCase{ Description: "Test schema migration", @@ -83,8 +83,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * } func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { - schemaVersionID1 := "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu" - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID1 := "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy" + schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" test := testUtils.TestCase{ Description: "Test schema migration", @@ -158,8 +158,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t } func TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { - schemaVersionID1 := "bafkreiaqs2jvnjgddkkhxzhhfmrr6o4yohhqymbi55b7ltynxo4tmge4wu" - schemaVersionID2 := "bafkreigc5whyvnmgqvdr6yk366ct4dddgmwnwrnbgbmu4f3edm3sfwerha" + schemaVersionID1 := "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy" + schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index a4cbba67f8..f22d4bcbc4 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -47,8 +47,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -109,8 +109,8 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index b01c197c46..b3ddd94e77 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - 
DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -129,8 +129,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index 07fa12ca53..6b7767943a 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -106,8 +106,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", - DestinationSchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -154,11 +154,11 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, { ID: 3, - SchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + SchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", }, { ID: 4, - SchemaVersionID: "bafkreiexwzcpjuz3eaghcanr3fnmyc6el5w6i5ovhop5zfrqctucwlraba", + SchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", Sources: []any{ &client.CollectionSource{ SourceCollectionID: 3, diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index b8ca9c71e7..854321a170 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { - schemaVersionID := "bafkreiegrxzoqa3mdgjsfz2vuatbpjbnqxub6yi23dvdumjpt4g3nhiwzq" + schemaVersionID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" test := testUtils.TestCase{ Actions: []any{ diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 0fa756891c..6ce10243c0 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -17,8 +17,8 @@ import ( ) func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { - initialSchemaVersionId := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - updatedSchemaVersionId := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + initialSchemaVersionId := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + updatedSchemaVersionId := 
"bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -105,8 +105,8 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi } func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { - initialSchemaVersionId := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - updatedSchemaVersionId := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + initialSchemaVersionId := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + updatedSchemaVersionId := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index f1f8c05411..4b1247718d 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -19,7 +19,7 @@ import ( func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17)", + Description: "Test schema update, add field with kind foreign object array", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,31 +34,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17} } ] `, - ExpectedError: "a schema name must be provided when adding a new relation field. Field: foo, Kind: 17", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_InvalidSchemaJson(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), invalid schema json", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17, "Schema": 123} } - ] - `, - ExpectedError: "json: cannot unmarshal number into Go struct field SchemaFieldDescription.Fields.Schema of type string", + ExpectedError: "no type found for given name. 
Type: 17", }, }, } @@ -80,7 +56,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingRelationName(t *test Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 17, "Schema": "Users" + "Name": "foo", "Kind": "[Users]" }} ] `, @@ -106,7 +82,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingKind(t *testi Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } ] @@ -120,7 +96,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingKind(t *testi func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field invalid kind", + Description: "Test schema update, add field with kind foreign object array, id field invalid kind", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -133,7 +109,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testi Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } ] @@ -147,7 +123,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testi func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field missing relation name", + Description: "Test schema update, add field with kind foreign object array, id field missing relation name", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -160,7 +136,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName( Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } ] @@ -174,7 +150,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName( func TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), only half relation defined", + Description: "Test schema update, add field with kind foreign object array, only half relation defined", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -187,7 +163,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t * Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" @@ -203,7 +179,7 @@ func 
TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t * func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), no primary defined", + Description: "Test schema update, add field with kind foreign object array, no primary defined", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -216,13 +192,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" }} ] `, @@ -235,7 +211,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryDefinedOnManySide(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), no primary defined", + Description: "Test schema update, add field with kind foreign object array, primary defined on many side", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -248,13 +224,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryDefinedOnManySide(t Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "IsPrimaryRelation": true, "RelationName": "foo" }} ] `, @@ -269,7 +245,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), valid, functional", + Description: "Test schema update, add field with kind foreign object array, valid, functional", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -282,13 +258,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" }} ] `, @@ -365,7 +341,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SinglePrimaryObjectKindSubs key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", +
Description: "Test schema update, add field with kind foreign object array, with single object Kind substitution", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -378,13 +354,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SinglePrimaryObjectKindSubs Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 17, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" }} ] `, @@ -444,7 +420,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSu key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", + Description: "Test schema update, add field with kind foreign object array, with single object Kind substitution", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -457,13 +433,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSu Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" }} ] `, @@ -523,7 +499,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *t key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with object Kind substitution", + Description: "Test schema update, add field with kind foreign object array, with object Kind substitution", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -536,13 +512,13 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *t Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" }} ] `, @@ -677,84 +653,6 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitutionWithA testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } 
- `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Dog", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. Kind: Users, Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SecondaryObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "Schema": "Dog", "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. Kind: [Users], Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 794ce0a546..af51ec335d 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -19,7 +19,7 @@ import ( func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16)", + Description: "Test schema update, add field with kind foreign object", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,31 +34,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 16} } ] `, - ExpectedError: "a schema name must be provided when adding a new relation field. Field: foo, Kind: 16", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_InvalidSchemaJson(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), invalid schema json", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 16, "Schema": 123} } - ] - `, - ExpectedError: "json: cannot unmarshal number into Go struct field SchemaFieldDescription.Fields.Schema of type string", + ExpectedError: "no type found for given name. 
Type: 16", }, }, } @@ -67,7 +43,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_InvalidSchemaJson(t *testing.T) func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), unknown schema", + Description: "Test schema update, add field with kind foreign object, unknown schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -80,11 +56,11 @@ func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Unknown" + "Name": "foo", "Kind": "Unknown" }} ] `, - ExpectedError: "no schema found for given name. Field: foo, Schema: Unknown", + ExpectedError: "no type found for given name. Field: foo, Kind: Unknown", }, }, } @@ -93,7 +69,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), missing relation name", + Description: "Test schema update, add field with kind foreign object, missing relation name", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -106,7 +82,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users" + "Name": "foo", "Kind": "Users" }} ] `, @@ -119,7 +95,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field missing kind", + Description: "Test schema update, add field with kind foreign object, id field missing kind", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -132,7 +108,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16,"IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users","IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } ] @@ -146,7 +122,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field invalid kind", + Description: "Test schema update, add field with kind foreign object, id field invalid kind", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -159,7 +135,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } ] @@ -173,7 +149,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *testing.T) { test := 
testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), id field missing relation name", + Description: "Test schema update, add field with kind foreign object, id field missing relation name", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -186,7 +162,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *te Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } ] @@ -200,7 +176,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *te func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), only half relation defined", + Description: "Test schema update, add field with kind foreign object, only half relation defined", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -213,7 +189,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testi Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" @@ -229,7 +205,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testi func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), no primary defined", + Description: "Test schema update, add field with kind foreign object, no primary defined", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -242,13 +218,13 @@ func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "Users", "RelationName": "foo" }} ] `, @@ -261,7 +237,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { func TestSchemaUpdatesAddFieldKindForeignObject_BothSidesPrimary(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), both sides primary", + Description: "Test schema update, add field with kind foreign object, both sides primary", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -274,16 +250,16 @@ func TestSchemaUpdatesAddFieldKindForeignObject_BothSidesPrimary(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { 
"Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "Schema": "Users", "RelationName": "foo" + "Name": "foobar_id", "Kind": 1, "RelationName": "foo" }} ] `, @@ -298,7 +274,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), valid, functional", + Description: "Test schema update, add field with kind foreign object, valid, functional", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -311,13 +287,13 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foo_id", "Kind": 1, "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" + "Name": "foobar", "Kind": "Users", "RelationName": "foo" }}, { "op": "add", "path": "/Users/Fields/-", "value": { "Name": "foobar_id", "Kind": 1, "RelationName": "foo" @@ -391,368 +367,6 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObject_SinglePrimaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": 16, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_SingleSecondaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: 
"Test schema update, add field with kind foreign object (16), with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": 16, "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "Schema": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": 
"/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindAndSchemaMismatch(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with Kind and Schema mismatch", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "Schema": "Dog", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "field Kind does not match field Schema. Kind: Users, Schema: Dog", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKindForeignObject_MissingPrimaryIDField(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" diff --git a/tests/integration/schema/updates/add/field/kind/invalid_test.go b/tests/integration/schema/updates/add/field/kind/invalid_test.go index b9c6dbbf31..331804a100 100644 --- a/tests/integration/schema/updates/add/field/kind/invalid_test.go +++ b/tests/integration/schema/updates/add/field/kind/invalid_test.go @@ -140,9 +140,9 @@ func TestSchemaUpdatesAddFieldKind198(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindInvalidSubstitution(t *testing.T) { +func TestSchemaUpdatesAddFieldKindInvalid(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind unsupported (198)", + Description: "Test schema update, add field with kind unsupported", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -157,7 +157,7 @@ func TestSchemaUpdatesAddFieldKindInvalidSubstitution(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "InvalidKind"} } ] `, - ExpectedError: "no type found for given name. Kind: InvalidKind", + ExpectedError: "no type found for given name. 
Field: foo, Kind: InvalidKind", }, }, } diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index c505668325..45a9b6afd5 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -20,8 +20,8 @@ import ( ) func TestSchemaUpdatesAddFieldSimple(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -115,8 +115,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { } func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" test := testUtils.TestCase{ Description: "Test schema update, add field", diff --git a/tests/integration/schema/updates/copy/field/simple_test.go b/tests/integration/schema/updates/copy/field/simple_test.go index 5721a9fb8b..a2c631a515 100644 --- a/tests/integration/schema/updates/copy/field/simple_test.go +++ b/tests/integration/schema/updates/copy/field/simple_test.go @@ -154,12 +154,12 @@ func TestSchemaUpdatesCopyFieldAndReplaceNameAndInvalidKindSubstitution(t *testi // re-typing the clone. Patch: ` [ - { "op": "copy", "from": "/Users/Schema/Fields/1", "path": "/Users/Fields/2" }, + { "op": "copy", "from": "/Users/Fields/1", "path": "/Users/Fields/2" }, { "op": "replace", "path": "/Users/Fields/2/Name", "value": "Age" }, { "op": "replace", "path": "/Users/Fields/2/Kind", "value": "NotAValidKind" } ] `, - ExpectedError: "no type found for given name. Kind: NotAValidKind", + ExpectedError: "no type found for given name. 
Field: Age, Kind: NotAValidKind", }, }, } diff --git a/tests/integration/schema/updates/move/simple_test.go b/tests/integration/schema/updates/move/simple_test.go index 94ecfcf1bb..2e33c709d6 100644 --- a/tests/integration/schema/updates/move/simple_test.go +++ b/tests/integration/schema/updates/move/simple_test.go @@ -17,7 +17,7 @@ import ( ) func TestSchemaUpdatesMoveCollectionDoesNothing(t *testing.T) { - schemaVersionID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" + schemaVersionID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" test := testUtils.TestCase{ Description: "Test schema update, move collection", diff --git a/tests/integration/schema/updates/remove/fields/simple_test.go b/tests/integration/schema/updates/remove/fields/simple_test.go index ef2ed6f6db..fae9b85dc7 100644 --- a/tests/integration/schema/updates/remove/fields/simple_test.go +++ b/tests/integration/schema/updates/remove/fields/simple_test.go @@ -141,35 +141,6 @@ func TestSchemaUpdatesRemoveFieldTypErrors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesRemoveFieldSchemaErrors(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, remove field Schema", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Author { - name: String - book: [Book] - } - type Book { - name: String - author: Author - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "remove", "path": "/Author/Fields/1/Schema" } - ] - `, - ExpectedError: "mutating an existing field is not supported. ProposedName: book", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesRemoveFieldRelationNameErrors(t *testing.T) { test := testUtils.TestCase{ Description: "Test schema update, remove field RelationName", diff --git a/tests/integration/schema/updates/test/field/simple_test.go b/tests/integration/schema/updates/test/field/simple_test.go index afde980f97..effdb162fe 100644 --- a/tests/integration/schema/updates/test/field/simple_test.go +++ b/tests/integration/schema/updates/test/field/simple_test.go @@ -102,7 +102,7 @@ func TestSchemaUpdatesTestFieldPasses(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/1", "value": { - "Name": "name", "Kind": 11, "Schema":"", "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Name": "name", "Kind": 11, "IsPrimaryRelation":false, "RelationName":"", "Typ":1 } } ] `, @@ -127,7 +127,7 @@ func TestSchemaUpdatesTestFieldPasses_UsingFieldNameAsIndex(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/name", "value": { - "Kind": 11, "Schema":"", "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Kind": 11, "IsPrimaryRelation":false, "RelationName":"", "Typ":1 } } ] `, diff --git a/tests/integration/schema/updates/with_schema_branch_test.go b/tests/integration/schema/updates/with_schema_branch_test.go index e6e6e6e850..a47a5f4bb4 100644 --- a/tests/integration/schema/updates/with_schema_branch_test.go +++ b/tests/integration/schema/updates/with_schema_branch_test.go @@ -20,9 +20,9 @@ import ( ) func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := 
"bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" test := testUtils.TestCase{ Description: "Test schema update, with branching schema", @@ -169,10 +169,10 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { } func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" - schemaVersion4ID := "bafkreicy4llechrh44zwviafs2ptjnr7sloiajjvpp7buaknhwspfevnt4" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" + schemaVersion4ID := "bafkreidqp7ha7mfhwqpahevcpsn5etmi3soawyq76oytdxlyozvs6cgyui" test := testUtils.TestCase{ Description: "Test schema update, with patch on branching schema", @@ -307,9 +307,9 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches", @@ -403,10 +403,10 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPatch(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" - schemaVersion2ID := "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4" - schemaVersion3ID := "bafkreieilqyv4bydakul5tbikpysmzwhzvxdau4twcny5n46zvxhkv7oli" - schemaVersion4ID := "bafkreict4nqhcurfkjskxlek3djpep2acwlfkztughoum4dsvuwigkfqzi" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" + schemaVersion4ID := "bafkreih5trmbzpjdgterha2amx2n6opgwlpvdyxfeyfi2uq7ncbodpl2cu" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches then patch", @@ -545,7 +545,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat } func TestSchemaUpdates_WithBranchingSchemaAndGetCollectionAtVersion(t *testing.T) { - schemaVersion1ID := "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4" + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" test := testUtils.TestCase{ Description: `Test schema update, with branching schema toggling between branches and gets the diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index 
e5179eb814..9203a61655 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -92,7 +92,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToOriginal_NewFieldIsNotQueriable( SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreiebcgze3rs6j3g7gu65dwskdg5fn3qby5c6nqffhbdkcy2l5bbvp4", + SchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", }, testUtils.Request{ Request: `query { @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreidn4f3i52756wevi3sfpbqzijgy6v24zh565pmvtmpqr4ou52v2q4", + SchemaVersionID: "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 930b429119..830a76ba2d 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -746,6 +746,7 @@ func refreshCollections( for _, collection := range allCollections { if collection.Name().Value() == collectionName { s.collections[nodeID][i] = collection + println(collection.Description().SchemaVersionID) break } } diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index 8252156e55..d83d1594fd 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -135,7 +135,7 @@ func (this *docGenerator) generatePrimary( if secDocField.IsRelation() { if secDocMapField, hasField := secDocMap[secDocField.Name]; hasField { if secDocField.IsPrimaryRelation { - primType := this.types[secDocField.Schema] + primType := this.types[secDocField.Kind.Underlying()] primDocMap, subResult, err := this.generatePrimary( secDocMap[secDocField.Name].(map[string]any), &primType) if err != nil { @@ -200,7 +200,7 @@ func (this *docGenerator) generateSecondaryDocs( if field.IsRelation() { if _, hasProp := primaryDocMap[field.Name]; hasProp { if !field.IsPrimaryRelation && - (parentTypeName == "" || parentTypeName != field.Schema) { + (parentTypeName == "" || parentTypeName != field.Kind.Underlying()) { docs, err := this.generateSecondaryDocsForField( primaryDocMap, primaryType.Description.Name.Value(), &field, docID) if err != nil { @@ -222,10 +222,10 @@ func (this *docGenerator) generateSecondaryDocsForField( primaryDocID string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} - relTypeDef := this.types[relField.Schema] + relTypeDef := this.types[relField.Kind.Underlying()] primaryPropName := "" for _, relDocField := range relTypeDef.Schema.Fields { - if relDocField.Schema == primaryTypeName && relDocField.IsPrimaryRelation { + if relDocField.Kind.Underlying() == primaryTypeName && relDocField.IsPrimaryRelation { primaryPropName = relDocField.Name + request.RelatedObjectID switch relVal := primaryDoc[relField.Name].(type) { case []map[string]any: From d4851b839a5cf9ef83af8c40bf8bd40afc5bfb4d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:31:18 -0400 Subject: [PATCH 06/49] bot: Update dependencies (bulk dependabot PRs) 19-03-2024 (#2426) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2420 bot: Bump @typescript-eslint/parser from 7.1.1 to 7.2.0 in 
/playground #2419 bot: Bump vite from 5.1.5 to 5.1.6 in /playground #2418 bot: Bump swagger-ui-react from 5.11.10 to 5.12.0 in /playground #2417 bot: Bump @types/react from 18.2.64 to 18.2.66 in /playground #2415 bot: Bump follow-redirects from 1.15.5 to 1.15.6 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 123 ++++++++++++++++++++++++++++------- playground/package.json | 8 +-- 2 files changed, 103 insertions(+), 28 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index e01bcfbc4d..1efd31a3b4 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,20 +12,20 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.11.9" + "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.61", + "@types/react": "^18.2.66", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.1.0", + "@typescript-eslint/parser": "^7.2.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.1.4" + "vite": "^5.1.6" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -2357,9 +2357,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.64", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.64.tgz", - "integrity": "sha512-MlmPvHgjj2p3vZaxbQgFUQFvD8QiZwACfGqEdDSWou5yISWxDQ4/74nCAwsUiX7UFLKZz3BbVSPj+YxeoGGCfg==", + "version": "18.2.66", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.66.tgz", + "integrity": "sha512-OYTmMI4UigXeFMF/j4uv0lBBEbongSgptPrHBxqME44h9+yNov+oL6Z3ocJKo0WyXR84sQUNeyIp9MRfckvZpg==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2451,15 +2451,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.1.1.tgz", - "integrity": "sha512-ZWUFyL0z04R1nAEgr9e79YtV5LbafdOtN7yapNbn1ansMyaegl2D4bL7vHoJ4HPSc4CaLwuCVas8CVuneKzplQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.2.0.tgz", + "integrity": "sha512-5FKsVcHTk6TafQKQbuIVkXq58Fnbkd2wDL4LB7AURN7RUOu1utVP+G8+6u3ZhEroW3DF6hyo3ZEXxgKgp4KeCg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/typescript-estree": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@typescript-eslint/scope-manager": "7.2.0", + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/typescript-estree": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0", "debug": "^4.3.4" }, "engines": { @@ -2478,6 +2478,81 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.2.0.tgz", + "integrity": "sha512-Qh976RbQM/fYtjx9hs4XkayYujB/aPwglw2choHmf3zBjB4qOywWSdt9+KLRdHubGcoSwBnXUH2sR3hkyaERRg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0" + }, + "engines": { + 
"node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.2.0.tgz", + "integrity": "sha512-XFtUHPI/abFhm4cbCDc5Ykc8npOKBSJePY3a3s+lwumt7XWJuzP5cZcfZ610MIPHjQjNsOLlYK8ASPaNG8UiyA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.2.0.tgz", + "integrity": "sha512-cyxS5WQQCoBwSakpMrvMXuMDEbhOo9bNHHrNcEWis6XHx6KF518tkF1wBvKIn/tpq5ZpUYK7Bdklu8qY0MsFIA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.2.0.tgz", + "integrity": "sha512-c6EIQRHhcpl6+tO8EMR+kjkkV+ugUNXOmeASA1rlzkd8EPIriavpWoiEz1HR/VLhbVIdhqnV6E7JZm00cBDx2A==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.1.1.tgz", @@ -3596,9 +3671,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -5784,9 +5859,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.11.10", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.11.10.tgz", - "integrity": "sha512-X5HwC5h/HN5txkjOmSfL2nuhQH3fkePSdH8rrvqKFKwzZpvUYw0CmwBpBkJyQm24FuI7U9q/k3/ru6dVG32cQw==", + "version": "5.12.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.12.0.tgz", + "integrity": "sha512-9QHLaGuo1x8jyGjGQMPOtcDFYH4lFb7L+FIYtplldUzo/JFgwfgUnpnUWadnhkVRJEGsHJqoy7IWJ4dK7WaPRQ==", "dependencies": { "@babel/runtime-corejs3": "^7.24.0", "@braintree/sanitize-url": "=7.0.0", @@ -5817,7 +5892,7 @@ "reselect": "^5.1.0", "serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.25.4", + 
"swagger-client": "^3.26.0", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -6106,9 +6181,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.1.5.tgz", - "integrity": "sha512-BdN1xh0Of/oQafhU+FvopafUp6WaYenLU/NFoL5WyJL++GxkNfieKzBhM24H3HVsPQrlAqB7iJYTHabzaRed5Q==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.1.6.tgz", + "integrity": "sha512-yYIAZs9nVfRJ/AiOLCA91zzhjsHUgMjB+EigzFb6W2XTLO8JixBCKCjvhKZaye+NKYHCrkv3Oh50dH9EdLU2RA==", "dev": true, "dependencies": { "esbuild": "^0.19.3", diff --git a/playground/package.json b/playground/package.json index 0930fe41ea..240e710c40 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,19 +14,19 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.11.9" + "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.61", + "@types/react": "^18.2.66", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.1.0", + "@typescript-eslint/parser": "^7.2.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.1.4" + "vite": "^5.1.6" } } From 915aecf76ada543233690f2021a9f9ee8be81d34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:09:12 -0700 Subject: [PATCH 07/49] bot: Bump @typescript-eslint/parser from 7.2.0 to 7.3.1 in /playground (#2428) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 7.2.0 to 7.3.1.
Release notes

Sourced from @​typescript-eslint/parser's releases.

v7.3.1

7.3.1 (2024-03-18)

🩹 Fixes

  • eslint-plugin: [no-floating-promises] revert disable of ignoreVoid in strict config (#8718)

❤️ Thank You

  • Josh Goldberg ✨

You can read about our versioning strategy and releases on our website.

v7.3.0

7.3.0 (2024-03-18)

Note that this release enables rules to have more restrictive default options as configured in the strict and strict-type-checked configs. See #8364 for more details.

🚀 Features

  • eslint-plugin: [restrict-template-expressions] add allowArray option (#8389)
  • eslint-plugin: add meta.docs.recommended setting for strict config options (#8364)
  • eslint-plugin: add rule use-unknown-in-catch-callback-variables (#8383)
  • eslint-plugin: [prefer-reduce-type-parameter] supports tuple, union, intersection (#8642)
  • eslint-plugin-internal: add internal lint rule no-relative-paths-to-internal-packages (#8596)
  • typescript-estree: disallow switch statements with multiple default cases (#8411)
  • utils: add parser name to thrown parser error message (#8484)

🩹 Fixes

  • correct engines.node constraints in package.json (#8671)
  • eslint-plugin: [unbound-method] check method definition in object literal using longhand form (#8637)
  • eslint-plugin: [consistent-type-imports] handle imports without specifiers (#8308)
  • eslint-plugin: [no-redundant-type-constituents] incorrectly marks & string as redundant (#8282)
  • eslint-plugin: [no-unnecessary-qualifier] handle merge namespace with enum (#8591)
  • eslint-plugin: [no-unused-expressions] false negatives when using assertions (#8668)
  • eslint-plugin: [ban-ts-comment] more accurate handling of multiline comments (#8416)
  • eslint-plugin: [explicit-function-return-type, explicit-module-boundary-types] improved checking for allowHigherOrderFunctions option (#8508)
  • eslint-plugin: [class-literal-property-style] ignore property assigned in constructor (#8412)
  • eslint-plugin: [no-unnecessary-type-assertion] fix false negative for const variable declarations (#8558)
  • typescript-estree: fix the issue of single run inferring in the pnpm repo (#3811, #8702)

❤️ Thank You

... (truncated)

Changelog

Sourced from @​typescript-eslint/parser's changelog.

7.3.1 (2024-03-18)

This was a version bump only for parser to align it with other projects; there were no code changes.

You can read about our versioning strategy and releases on our website.

7.3.0 (2024-03-18)

🩹 Fixes

  • correct engines.node constraints in package.json

❤️ Thank You

  • Abraham Guo
  • Alexu
  • Arka Pratim Chaudhuri
  • auvred
  • Derrick Isaacson
  • fnx
  • Josh Goldberg ✨
  • Kirk Waiblinger
  • Marta Cardoso
  • Michaël De Boey
  • Tristan Rasmussen
  • YeonJuan

You can read about our versioning strategy and releases on our website.

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=7.2.0&new-version=7.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 60 ++++++++++++++++++------------------ playground/package.json | 2 +- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 1efd31a3b4..ad2c8f6623 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.2.0", + "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2451,19 +2451,19 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.2.0.tgz", - "integrity": "sha512-5FKsVcHTk6TafQKQbuIVkXq58Fnbkd2wDL4LB7AURN7RUOu1utVP+G8+6u3ZhEroW3DF6hyo3ZEXxgKgp4KeCg==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.3.1.tgz", + "integrity": "sha512-Rq49+pq7viTRCH48XAbTA+wdLRrB/3sRq4Lpk0oGDm0VmnjBrAOVXH/Laalmwsv2VpekiEfVFwJYVk6/e8uvQw==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.2.0", - "@typescript-eslint/types": "7.2.0", - "@typescript-eslint/typescript-estree": "7.2.0", - "@typescript-eslint/visitor-keys": "7.2.0", + "@typescript-eslint/scope-manager": "7.3.1", + "@typescript-eslint/types": "7.3.1", + "@typescript-eslint/typescript-estree": "7.3.1", + "@typescript-eslint/visitor-keys": "7.3.1", "debug": "^4.3.4" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2479,16 +2479,16 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.2.0.tgz", - "integrity": "sha512-Qh976RbQM/fYtjx9hs4XkayYujB/aPwglw2choHmf3zBjB4qOywWSdt9+KLRdHubGcoSwBnXUH2sR3hkyaERRg==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.3.1.tgz", + "integrity": "sha512-fVS6fPxldsKY2nFvyT7IP78UO1/I2huG+AYu5AMjCT9wtl6JFiDnsv4uad4jQ0GTFzcUV5HShVeN96/17bTBag==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.2.0", - "@typescript-eslint/visitor-keys": "7.2.0" + "@typescript-eslint/types": "7.3.1", + "@typescript-eslint/visitor-keys": "7.3.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2496,12 +2496,12 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.2.0.tgz", - "integrity": "sha512-XFtUHPI/abFhm4cbCDc5Ykc8npOKBSJePY3a3s+lwumt7XWJuzP5cZcfZ610MIPHjQjNsOLlYK8ASPaNG8UiyA==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.3.1.tgz", + "integrity": "sha512-2tUf3uWggBDl4S4183nivWQ2HqceOZh1U4hhu4p1tPiIJoRRXrab7Y+Y0p+dozYwZVvLPRI6r5wKe9kToF9FIw==", "dev": true, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2509,13 +2509,13 @@ } }, 
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.2.0.tgz", - "integrity": "sha512-cyxS5WQQCoBwSakpMrvMXuMDEbhOo9bNHHrNcEWis6XHx6KF518tkF1wBvKIn/tpq5ZpUYK7Bdklu8qY0MsFIA==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.3.1.tgz", + "integrity": "sha512-tLpuqM46LVkduWP7JO7yVoWshpJuJzxDOPYIVWUUZbW+4dBpgGeUdl/fQkhuV0A8eGnphYw3pp8d2EnvPOfxmQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.2.0", - "@typescript-eslint/visitor-keys": "7.2.0", + "@typescript-eslint/types": "7.3.1", + "@typescript-eslint/visitor-keys": "7.3.1", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2524,7 +2524,7 @@ "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2537,16 +2537,16 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.2.0.tgz", - "integrity": "sha512-c6EIQRHhcpl6+tO8EMR+kjkkV+ugUNXOmeASA1rlzkd8EPIriavpWoiEz1HR/VLhbVIdhqnV6E7JZm00cBDx2A==", + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.3.1.tgz", + "integrity": "sha512-9RMXwQF8knsZvfv9tdi+4D/j7dMG28X/wMJ8Jj6eOHyHWwDW4ngQJcqEczSsqIKKjFiLFr40Mnr7a5ulDD3vmw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/types": "7.3.1", "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", diff --git a/playground/package.json b/playground/package.json index 240e710c40..0445d27a75 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", - "@typescript-eslint/parser": "^7.2.0", + "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", From d7931ff0e8ebbe3e2645191bd0f3c468506abbb2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Mar 2024 03:25:48 -0700 Subject: [PATCH 08/49] bot: Bump @types/react from 18.2.66 to 18.2.67 in /playground (#2427) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.66 to 18.2.67.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.66&new-version=18.2.67)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index ad2c8f6623..1b888ea97e 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.66", + "@types/react": "^18.2.67", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", @@ -2357,9 +2357,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.66", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.66.tgz", - "integrity": "sha512-OYTmMI4UigXeFMF/j4uv0lBBEbongSgptPrHBxqME44h9+yNov+oL6Z3ocJKo0WyXR84sQUNeyIp9MRfckvZpg==", + "version": "18.2.67", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.67.tgz", + "integrity": "sha512-vkIE2vTIMHQ/xL0rgmuoECBCkZFZeHr49HeWSc24AptMbNRo7pwSBvj73rlJJs9fGKj0koS+V7kQB1jHS0uCgw==", "devOptional": true, "dependencies": { "@types/prop-types": "*", diff --git a/playground/package.json b/playground/package.json index 0445d27a75..fb8253684e 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.66", + "@types/react": "^18.2.67", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.1.0", From 839b50523aa9fd26b0ca27e996031e1963842169 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 20 Mar 2024 16:58:32 -0400 Subject: [PATCH 09/49] feat: Add Defra-Lens support for branching schema (#2421) ## Relevant issue(s) Resolves #2293 ## Description Adds Defra-Lens support for branching schema. Defra will now be able to handle migrations for a branching schema version DAG. --- lens/fetcher.go | 2 +- lens/history.go | 206 ++++++++---------- lens/lens.go | 23 +- .../query/with_p2p_schema_branch_test.go | 137 ++++++++++++ .../query/with_schema_branch_test.go | 107 +++++++++ tests/integration/utils2.go | 1 - 6 files changed, 346 insertions(+), 130 deletions(-) create mode 100644 tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go create mode 100644 tests/integration/schema/migrations/query/with_schema_branch_test.go diff --git a/lens/fetcher.go b/lens/fetcher.go index 1e093f3966..90c80c78fb 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -76,7 +76,7 @@ func (f *lensedFetcher) Init( f.fieldDescriptionsByName[defFields[i].Name] = defFields[i] } - history, err := getTargetedSchemaHistory(ctx, txn, f.col.Schema().Root, f.col.Schema().VersionID) + history, err := getTargetedCollectionHistory(ctx, txn, f.col.Schema().Root, f.col.Schema().VersionID) if err != nil { return err } diff --git a/lens/history.go b/lens/history.go index a7a5ee57d8..3bf1a28ac8 100644 --- a/lens/history.go +++ b/lens/history.go @@ -20,185 +20,159 @@ import ( "github.com/sourcenetwork/defradb/db/description" ) -// schemaHistoryLink represents an item in a particular schema's history, it +// collectionHistoryLink represents an item in a particular collection's schema history, it // links to the previous and next version items if they exist. -type schemaHistoryLink struct { +type collectionHistoryLink struct { // The collection as this point in history. 
collection *client.CollectionDescription - // The history link to the next schema versions, if there are some + // The history link to the next collection versions, if there are some // (for the most recent schema version this will be empty). - next []*schemaHistoryLink + next []*collectionHistoryLink - // The history link to the previous schema versions, if there are - // some (for the initial schema version this will be empty). - previous []*schemaHistoryLink + // The history link to the previous collection versions, if there are + // some (for the initial collection version this will be empty). + previous []*collectionHistoryLink } -// targetedSchemaHistoryLink represents an item in a particular schema's history, it -// links to the previous and next version items if they exist. -// -// It also contains a vector which describes the distance and direction to the -// target schema version (given as an input param on construction). -type targetedSchemaHistoryLink struct { +// targetedCollectionHistoryLink represents an item in a particular collection's schema history, it +// links to the previous and next version items if they exist and are on the path to +// the target schema version. +type targetedCollectionHistoryLink struct { // The collection as this point in history. collection *client.CollectionDescription - // The link to next schema version, if there is one - // (for the most recent schema version this will be None). - next immutable.Option[*targetedSchemaHistoryLink] - - // The link to the previous schema version, if there is - // one (for the initial schema version this will be None). - previous immutable.Option[*targetedSchemaHistoryLink] + // The link to next collection version, if there is one + // (for the most recent collection version this will be None). + next immutable.Option[*targetedCollectionHistoryLink] - // The distance and direction from this history item to the target. - // - // A zero value indicates that this is the target item. A positive value - // indicates that the target is more recent. A negative value indicates - // that the target predates this history item. - targetVector int + // The link to the previous collection version, if there is + // one (for the initial collection version this will be None). + previous immutable.Option[*targetedCollectionHistoryLink] } -// getTargetedSchemaHistory returns the history of the schema of the given id, relative +// getTargetedCollectionHistory returns the history of the schema of the given id, relative // to the given target schema version id. // -// This includes any history items that are only known via registered -// schema migrations. -func getTargetedSchemaHistory( +// This includes any history items that are only known via registered schema migrations. 
+func getTargetedCollectionHistory( ctx context.Context, txn datastore.Txn, schemaRoot string, targetSchemaVersionID string, -) (map[schemaVersionID]*targetedSchemaHistoryLink, error) { - history, err := getSchemaHistory(ctx, txn, schemaRoot) +) (map[schemaVersionID]*targetedCollectionHistoryLink, error) { + history, err := getCollectionHistory(ctx, txn, schemaRoot) if err != nil { return nil, err } - result := map[schemaVersionID]*targetedSchemaHistoryLink{} - - for _, item := range history { - result[item.collection.SchemaVersionID] = &targetedSchemaHistoryLink{ - collection: item.collection, - } + targetHistoryItem, ok := history[targetSchemaVersionID] + if !ok { + // If the target schema version is unknown then there are no possible migrations + // that we can do. + return nil, nil } - for _, item := range result { - schemaHistoryLink := history[item.collection.ID] - nextHistoryItems := schemaHistoryLink.next - if len(nextHistoryItems) == 0 { - continue - } + result := map[schemaVersionID]*targetedCollectionHistoryLink{} - // WARNING: This line assumes that each collection can only have a single source, and so - // just takes the first item. If/when collections can have multiple sources we will need to change - // this slightly. - nextItem := result[nextHistoryItems[0].collection.SchemaVersionID] - item.next = immutable.Some(nextItem) - nextItem.previous = immutable.Some(item) + targetLink := &targetedCollectionHistoryLink{ + collection: targetHistoryItem.collection, } + result[targetLink.collection.SchemaVersionID] = targetLink - orphanSchemaVersions := map[string]struct{}{} + linkForwards(targetLink, targetHistoryItem, result) + linkBackwards(targetLink, targetHistoryItem, result) - for schemaVersion, item := range result { - if item.collection.SchemaVersionID == targetSchemaVersionID { - continue - } - if item.targetVector != 0 { + return result, nil +} + +// linkForwards traverses and links the history forwards from the given starting point. +// +// Forward collection versions found will in turn be linked both forwards and backwards, allowing +// branches to be correctly mapped to the target schema version. +func linkForwards( + currentLink *targetedCollectionHistoryLink, + currentHistoryItem *collectionHistoryLink, + result map[schemaVersionID]*targetedCollectionHistoryLink, +) { + for _, nextHistoryItem := range currentHistoryItem.next { + if _, ok := result[nextHistoryItem.collection.SchemaVersionID]; ok { + // As the history forms a DAG, this should only ever happen when + // iterating through the item we were at immediately before the current. continue } - distanceTravelled := 0 - currentItem := item - wasFound := false - for { - if !currentItem.next.HasValue() { - break - } - - currentItem = currentItem.next.Value() - distanceTravelled++ - if currentItem.targetVector != 0 { - distanceTravelled += currentItem.targetVector - wasFound = true - break - } - if currentItem.collection.SchemaVersionID == targetSchemaVersionID { - wasFound = true - break - } + nextLink := &targetedCollectionHistoryLink{ + collection: nextHistoryItem.collection, + previous: immutable.Some(currentLink), } + result[nextLink.collection.SchemaVersionID] = nextLink - if !wasFound { - // The target was not found going up the chain, try looking back. - // This is important for downgrading schema versions. 
- for { - if !currentItem.previous.HasValue() { - break - } - - currentItem = currentItem.previous.Value() - distanceTravelled-- - if currentItem.targetVector != 0 { - distanceTravelled += currentItem.targetVector - wasFound = true - break - } - if currentItem.collection.SchemaVersionID == targetSchemaVersionID { - wasFound = true - break - } - } - } + linkForwards(nextLink, nextHistoryItem, result) + linkBackwards(nextLink, nextHistoryItem, result) + } +} - if !wasFound { - // This may happen if users define schema migrations to unknown schema versions - // with no migration path to known schema versions, esentially creating orphan - // migrations. These may become linked later and should remain persisted in the - // database, but we can drop them from the history here/now. - orphanSchemaVersions[schemaVersion] = struct{}{} +// linkBackwards traverses and links the history backwards from the given starting point. +// +// Backward collection versions found will in turn be linked both forwards and backwards, allowing +// branches to be correctly mapped to the target schema version. +func linkBackwards( + currentLink *targetedCollectionHistoryLink, + currentHistoryItem *collectionHistoryLink, + result map[schemaVersionID]*targetedCollectionHistoryLink, +) { + for _, prevHistoryItem := range currentHistoryItem.previous { + if _, ok := result[prevHistoryItem.collection.SchemaVersionID]; ok { + // As the history forms a DAG, this should only ever happen when + // iterating through the item we were at immediately before the current. continue } - item.targetVector = distanceTravelled - } + prevLink := &targetedCollectionHistoryLink{ + collection: prevHistoryItem.collection, + next: immutable.Some(currentLink), + } + result[prevLink.collection.SchemaVersionID] = prevLink - for schemaVersion := range orphanSchemaVersions { - delete(result, schemaVersion) + linkForwards(prevLink, prevHistoryItem, result) + linkBackwards(prevLink, prevHistoryItem, result) } - - return result, nil } -// getSchemaHistory returns the history of the schema of the given id as linked list +// getCollectionHistory returns the history of the collection of the given root id as a linked list // with each item mapped by schema version id. // -// This includes any history items that are only known via registered -// schema migrations. -func getSchemaHistory( +// This includes any history items that are only known via registered schema migrations.
+func getCollectionHistory( ctx context.Context, txn datastore.Txn, schemaRoot string, -) (map[collectionID]*schemaHistoryLink, error) { +) (map[schemaVersionID]*collectionHistoryLink, error) { cols, err := description.GetCollectionsBySchemaRoot(ctx, txn, schemaRoot) if err != nil { return nil, err } - history := map[collectionID]*schemaHistoryLink{} + history := map[schemaVersionID]*collectionHistoryLink{} + schemaVersionsByColID := map[uint32]schemaVersionID{} for _, c := range cols { + // Todo - this `col := c` can be removed with Go 1.22: + // https://github.com/sourcenetwork/defradb/issues/2431 col := c + // Convert the temporary types to the cleaner return type: - history[col.ID] = &schemaHistoryLink{ + history[col.SchemaVersionID] = &collectionHistoryLink{ collection: &col, } + schemaVersionsByColID[col.ID] = col.SchemaVersionID } for _, historyItem := range history { for _, source := range historyItem.collection.CollectionSources() { - src := history[source.SourceCollectionID] + srcSchemaVersion := schemaVersionsByColID[source.SourceCollectionID] + src := history[srcSchemaVersion] historyItem.previous = append( historyItem.next, src, diff --git a/lens/lens.go b/lens/lens.go index 4e700d7324..1a42bdf972 100644 --- a/lens/lens.go +++ b/lens/lens.go @@ -19,7 +19,6 @@ import ( ) type schemaVersionID = string -type collectionID = uint32 // LensDoc represents a document that will be sent to/from a Lens. type LensDoc = map[string]any @@ -57,7 +56,7 @@ type lens struct { outputPipe enumerable.Concatenation[LensDoc] unknownVersionPipe enumerable.Queue[LensDoc] - schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink + collectionHistory map[schemaVersionID]*targetedCollectionHistoryLink source enumerable.Queue[lensInput] } @@ -68,18 +67,18 @@ func new( ctx context.Context, lensRegistry client.LensRegistry, targetSchemaVersionID schemaVersionID, - schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink, + collectionHistory map[schemaVersionID]*targetedCollectionHistoryLink, ) Lens { targetSource := enumerable.NewQueue[LensDoc]() outputPipe := enumerable.Concat[LensDoc](targetSource) return &lens{ - lensRegistry: lensRegistry, - ctx: ctx, - source: enumerable.NewQueue[lensInput](), - outputPipe: outputPipe, - unknownVersionPipe: targetSource, - schemaVersionHistory: schemaVersionHistory, + lensRegistry: lensRegistry, + ctx: ctx, + source: enumerable.NewQueue[lensInput](), + outputPipe: outputPipe, + unknownVersionPipe: targetSource, + collectionHistory: collectionHistory, lensInputPipesBySchemaVersionIDs: map[schemaVersionID]enumerable.Queue[LensDoc]{ targetSchemaVersionID: targetSource, }, @@ -137,7 +136,7 @@ func (l *lens) Next() (bool, error) { // up to the output via any intermediary pipes. inputPipe = p } else { - historyLocation, ok := l.schemaVersionHistory[doc.SchemaVersionID] + historyLocation, ok := l.collectionHistory[doc.SchemaVersionID] if !ok { // We may receive documents of unknown schema versions; they should // still be fed through the pipe system in order to preserve order. @@ -178,7 +177,7 @@ func (l *lens) Next() (bool, error) { break } - if historyLocation.targetVector > 0 { + if historyLocation.next.HasValue() { // Acquire a lens migration from the registry, using the junctionPipe as its source. // The new pipeHead will then be connected as a source to the next migration-stage on // the next loop.
@@ -188,7 +187,7 @@ func (l *lens) Next() (bool, error) { } historyLocation = historyLocation.next.Value() - } else { + } else if historyLocation.previous.HasValue() { // Acquire a lens migration from the registry, using the junctionPipe as its source. // The new pipeHead will then be connected as a source to the next migration-stage on // the next loop. diff --git a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go new file mode 100644 index 0000000000..9aba1698e1 --- /dev/null +++ b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go @@ -0,0 +1,137 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQueryWithP2PReplicatedDocOnOtherSchemaBranch(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + verified: Boolean + } + `, + }, + testUtils.SchemaPatch{ + // Patch the first node only + NodeID: immutable.Some(0), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + // Register the migration on both nodes. + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", + DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }, + }, + }, + testUtils.SchemaPatch{ + // Patch the second node with a different patch + NodeID: immutable.Some(1), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "phone", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "phone", + "value": "1234567890", + }, + }, + }, + }), + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "verified": true + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + name + verified + } + }`, + Results: []map[string]any{ + { + "name": "John", + "verified": true, + }, + }, + }, + testUtils.Request{ + // Node 1 should yield results migrated down to schema version 1, then up to schema version 3.
+ NodeID: immutable.Some(1), + Request: ` + query { + Users { + name + phone + } + } + `, + Results: []map[string]any{ + { + // name has been cleared by the inverse of the migration from version 1 to 2 + "name": nil, + // phone has been set by the migration from version 1 to 3 + "phone": "1234567890", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/migrations/query/with_schema_branch_test.go b/tests/integration/schema/migrations/query/with_schema_branch_test.go new file mode 100644 index 0000000000..fe882944ee --- /dev/null +++ b/tests/integration/schema/migrations/query/with_schema_branch_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQuery_WithBranchingSchema(t *testing.T) { + schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + + test := testUtils.TestCase{ + Description: "Test schema update, with branching schema migrations", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + SetAsDefaultVersion: immutable.Some(true), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }), + }, + testUtils.CreateDoc{ + // Create a document on the second schema version, with an email field value + Doc: `{ + "name": "John", + "email": "john@source.hub" + }`, + }, + testUtils.SetActiveSchemaVersion{ + // Set the active schema version back to the first + SchemaVersionID: schemaVersion1ID, + }, + testUtils.SchemaPatch{ + // The third schema version will be set as the active version, going from version 1 to 3 + SetAsDefaultVersion: immutable.Some(true), + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "phone", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "phone", + "value": "1234567890", + }, + }, + }, + }), + }, + testUtils.Request{ + Request: ` + query { + Users { + name + phone + } + } + `, + Results: []map[string]any{ + { + // name has been cleared by the inverse of the migration from version 1 to 2 + "name": nil, + // phone has been set by the migration from version 1 to 3 + "phone": "1234567890", + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 830a76ba2d..930b429119 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -746,7 +746,6 @@ func refreshCollections( for _, collection := range allCollections { if collection.Name().Value() == collectionName { s.collections[nodeID][i] =
collection - println(collection.Description().SchemaVersionID) break } } } From 75cf5ad74cc2dc571d43d40c04f24476ea2d380d Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 20 Mar 2024 18:01:24 -0400 Subject: [PATCH 10/49] feat: Allow mutation of col sources via PatchCollection (#2424) ## Relevant issue(s) Resolves #2422 ## Description Allows the mutation of collection sources via `PatchCollection`. Users can now set view and migration Lens transforms, and update view queries, via JSON patch. Validation of the Query struct is non-existent, as it is in the develop branch; an invalid config will result in errors when querying the view. I think we can improve on this later, but this PR does increase the likelihood of invalid view queries being constructed (by users). It also massively increases the exposure of the `client/request` package, which, whilst already public, is quite poorly documented. Similar to validation, this PR increases the priority of documenting it. --- db/collection.go | 76 +++++++++- db/errors.go | 19 ++- .../updates/add/sources_test.go | 2 +- .../remove/col_source_transform_test.go | 80 ++++++++++ .../replace/col_source_source_id_test.go | 53 +++++++ .../replace/col_source_transform_test.go | 88 +++++++++++ .../updates/replace/id_test.go | 2 +- .../replace/query_source_query_test.go | 141 ++++++++++++++++++ .../replace/query_source_transform_test.go | 113 ++++++++++++++ .../updates/replace/sources_test.go | 24 ++- 10 files changed, 585 insertions(+), 13 deletions(-) create mode 100644 tests/integration/collection_description/updates/remove/col_source_transform_test.go create mode 100644 tests/integration/collection_description/updates/replace/col_source_source_id_test.go create mode 100644 tests/integration/collection_description/updates/replace/col_source_transform_test.go create mode 100644 tests/integration/collection_description/updates/replace/query_source_query_test.go create mode 100644 tests/integration/collection_description/updates/replace/query_source_transform_test.go diff --git a/db/collection.go b/db/collection.go index b1fc102943..49bdf01e71 100644 --- a/db/collection.go +++ b/db/collection.go @@ -572,6 +572,47 @@ func (db *db) patchCollection( if err != nil { return err } + + existingCol, ok := existingColsByID[col.ID] + if ok { + // Clear any existing migrations in the registry, using this semi-hacky way + // to avoid adding more functions to a public interface that we wish to remove.
+ + for _, src := range existingCol.CollectionSources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, existingCol.ID, model.Lens{}) + if err != nil { + return err + } + } + } + for _, src := range existingCol.QuerySources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, existingCol.ID, model.Lens{}) + if err != nil { + return err + } + } + } + } + + for _, src := range col.CollectionSources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, col.ID, src.Transform.Value()) + if err != nil { + return err + } + } + } + + for _, src := range col.QuerySources() { + if src.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, col.ID, src.Transform.Value()) + if err != nil { + return err + } + } + } } return db.loadSchema(ctx, txn) @@ -583,7 +624,7 @@ var patchCollectionValidators = []func( ) error{ validateCollectionNameUnique, validateSingleVersionActive, - validateSourcesNotModified, + validateSourcesNotRedefined, validateIndexesNotModified, validateFieldsNotModified, validateIDNotZero, @@ -646,7 +687,12 @@ func validateSingleVersionActive( return nil } -func validateSourcesNotModified( +// validateSourcesNotRedefined specifies the limitations on how the collection sources +// can be mutated. +// +// Currently new sources cannot be added, existing cannot be removed, and CollectionSources +// cannot be redirected to other collections. +func validateSourcesNotRedefined( oldColsByID map[uint32]client.CollectionDescription, newColsByID map[uint32]client.CollectionDescription, ) error { @@ -656,10 +702,28 @@ func validateSourcesNotModified( continue } - // DeepEqual is temporary, as this validation is temporary, for example soon - // users will be able to be able to change the migration - if !reflect.DeepEqual(oldCol.Sources, newCol.Sources) { - return NewErrCollectionSourcesCannotBeMutated(newCol.ID) + newColSources := newCol.CollectionSources() + oldColSources := oldCol.CollectionSources() + + if len(newColSources) != len(oldColSources) { + return NewErrCollectionSourcesCannotBeAddedRemoved(newCol.ID) + } + + for i := range newColSources { + if newColSources[i].SourceCollectionID != oldColSources[i].SourceCollectionID { + return NewErrCollectionSourceIDMutated( + newCol.ID, + newColSources[i].SourceCollectionID, + oldColSources[i].SourceCollectionID, + ) + } + } + + newQuerySources := newCol.QuerySources() + oldQuerySources := oldCol.QuerySources() + + if len(newQuerySources) != len(oldQuerySources) { + return NewErrCollectionSourcesCannotBeAddedRemoved(newCol.ID) } } diff --git a/db/errors.go b/db/errors.go index bda5154f79..c32da44671 100644 --- a/db/errors.go +++ b/db/errors.go @@ -85,7 +85,8 @@ const ( errInvalidViewQuery string = "the query provided is not valid as a View" errCollectionAlreadyExists string = "collection already exists" errMultipleActiveCollectionVersions string = "multiple versions of same collection cannot be active" - errCollectionSourcesCannotBeMutated string = "collection sources cannot be mutated" + errCollectionSourcesCannotBeAddedRemoved string = "collection sources cannot be added or removed" + errCollectionSourceIDMutated string = "collection source ID cannot be mutated" errCollectionIndexesCannotBeMutated string = "collection indexes cannot be mutated" errCollectionFieldsCannotBeMutated string = "collection fields cannot be mutated" errCollectionRootIDCannotBeMutated string = "collection root ID cannot be mutated" @@ -113,7 +114,8 @@ var ( ErrInvalidViewQuery = 
errors.New(errInvalidViewQuery) ErrCanNotIndexNonUniqueFields = errors.New(errCanNotIndexNonUniqueFields) ErrMultipleActiveCollectionVersions = errors.New(errMultipleActiveCollectionVersions) - ErrCollectionSourcesCannotBeMutated = errors.New(errCollectionSourcesCannotBeMutated) + ErrCollectionSourcesCannotBeAddedRemoved = errors.New(errCollectionSourcesCannotBeAddedRemoved) + ErrCollectionSourceIDMutated = errors.New(errCollectionSourceIDMutated) ErrCollectionIndexesCannotBeMutated = errors.New(errCollectionIndexesCannotBeMutated) ErrCollectionFieldsCannotBeMutated = errors.New(errCollectionFieldsCannotBeMutated) ErrCollectionRootIDCannotBeMutated = errors.New(errCollectionRootIDCannotBeMutated) @@ -573,13 +575,22 @@ func NewErrMultipleActiveCollectionVersions(name string, root uint32) error { ) } -func NewErrCollectionSourcesCannotBeMutated(colID uint32) error { +func NewErrCollectionSourcesCannotBeAddedRemoved(colID uint32) error { return errors.New( - errCollectionSourcesCannotBeMutated, + errCollectionSourcesCannotBeAddedRemoved, errors.NewKV("CollectionID", colID), ) } +func NewErrCollectionSourceIDMutated(colID uint32, newSrcID uint32, oldSrcID uint32) error { + return errors.New( + errCollectionSourceIDMutated, + errors.NewKV("CollectionID", colID), + errors.NewKV("NewCollectionSourceID", newSrcID), + errors.NewKV("OldCollectionSourceID", oldSrcID), + ) +} + func NewErrCollectionIndexesCannotBeMutated(colID uint32) error { return errors.New( errCollectionIndexesCannotBeMutated, diff --git a/tests/integration/collection_description/updates/add/sources_test.go b/tests/integration/collection_description/updates/add/sources_test.go index 37010aa15c..c58ff4a660 100644 --- a/tests/integration/collection_description/updates/add/sources_test.go +++ b/tests/integration/collection_description/updates/add/sources_test.go @@ -30,7 +30,7 @@ func TestColDescrUpdateAddSources_Errors(t *testing.T) { { "op": "add", "path": "/1/Sources/-", "value": {"SourceCollectionID": 1} } ] `, - ExpectedError: "collection sources cannot be mutated. CollectionID: 1", + ExpectedError: "collection sources cannot be added or removed. CollectionID: 1", }, }, } diff --git a/tests/integration/collection_description/updates/remove/col_source_transform_test.go b/tests/integration/collection_description/updates/remove/col_source_transform_test.go new file mode 100644 index 0000000000..73179c16b0 --- /dev/null +++ b/tests/integration/collection_description/updates/remove/col_source_transform_test.go @@ -0,0 +1,80 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package remove + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestColDescrUpdateRemoveCollectionSourceTransform(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + Lens: immutable.Some(model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }), + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/2/Sources/0/Transform" } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // If the transform was not removed, `"Fred"` would have been returned + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/col_source_source_id_test.go b/tests/integration/collection_description/updates/replace/col_source_source_id_test.go new file mode 100644 index 0000000000..3ca1a749f6 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/col_source_source_id_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceCollectionSourceSourceCollectionID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/2/Sources/0/SourceCollectionID", "value": 3 } + ] + `, + ExpectedError: "collection source ID cannot be mutated. CollectionID: 2, NewCollectionSourceID: 3, OldCollectionSourceID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/col_source_transform_test.go b/tests/integration/collection_description/updates/replace/col_source_transform_test.go new file mode 100644 index 0000000000..b933dcd2ed --- /dev/null +++ b/tests/integration/collection_description/updates/replace/col_source_transform_test.go @@ -0,0 +1,88 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/stretchr/testify/require" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestColDescrUpdateReplaceCollectionSourceTransform(t *testing.T) { + transformCfgJson, err := json.Marshal( + model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "name", + "value": "Fred", + }, + }, + }, + }, + ) + require.NoError(t, err) + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": 11} } + ] + `, + }, + testUtils.PatchCollection{ + Patch: fmt.Sprintf(` + [ + { "op": "replace", "path": "/2/Sources/0/Transform", "value": %s } + ] + `, + transformCfgJson, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + } + }`, + // Without the new transform, `"Shahzad"` would have been returned + Results: []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/id_test.go b/tests/integration/collection_description/updates/replace/id_test.go index b83c634385..a89dad193b 100644 --- a/tests/integration/collection_description/updates/replace/id_test.go +++ b/tests/integration/collection_description/updates/replace/id_test.go @@ -87,7 +87,7 @@ func TestColDescrUpdateReplaceID_WithExistingSameRoot_Errors(t *testing.T) { { "op": "replace", "path": "/2/ID", "value": 1 } ] `, - ExpectedError: "collection sources cannot be mutated.", + ExpectedError: "collection sources cannot be added or removed.", }, }, } diff --git a/tests/integration/collection_description/updates/replace/query_source_query_test.go b/tests/integration/collection_description/updates/replace/query_source_query_test.go new file mode 100644 index 0000000000..789f4b2d7b --- /dev/null +++ b/tests/integration/collection_description/updates/replace/query_source_query_test.go @@ -0,0 +1,141 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceQuerySourceQuery(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books { + name: String + } + `, + }, + testUtils.CreateView{ + // Create the view on the `Books` collection + Query: ` + Books { + name + } + `, + SDL: ` + type View { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.PatchCollection{ + // Patch the view query definition so that it now queries the `Users` collection + Patch: ` + [ + { "op": "replace", "path": "/3/Sources/0/Query", "value": {"Name": "Users", "Fields":[{"Name":"name"}]} } + ] + `, + }, + testUtils.Request{ + Request: `query { + View { + name + } + }`, + // If the view was still querying `Books` there would be no results + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceQuerySourceQueryName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Books { + name: String + } + `, + }, + testUtils.CreateView{ + // Create the view on the `Books` collection + Query: ` + Books { + name + } + `, + SDL: ` + type View { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.PatchCollection{ + // Patch the view query definition so that it now queries the `Users` collection + Patch: ` + [ + { "op": "replace", "path": "/3/Sources/0/Query/Name", "value": "Users" } + ] + `, + }, + testUtils.Request{ + Request: `query { + View { + name + } + }`, + // If the view was still querying `Books` there would be no results + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/query_source_transform_test.go b/tests/integration/collection_description/updates/replace/query_source_transform_test.go new file mode 100644 index 0000000000..89a2598010 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/query_source_transform_test.go @@ -0,0 +1,113 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replace + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/require" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestColDescrUpdateReplaceQuerySourceTransform(t *testing.T) { + newTransformCfgJson, err := json.Marshal( + model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.CopyModulePath, + Arguments: map[string]any{ + "src": "lastName", + "dst": "fullName", + }, + }, + }, + }, + ) + require.NoError(t, err) + + test := testUtils.TestCase{ + Description: "Simple view with transform", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + firstName: String + lastName: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + firstName + lastName + } + `, + SDL: ` + type UserView { + fullName: String + } + `, + Transform: immutable.Some(model.Lens{ + // This transform will copy the value from `firstName` into the `fullName` field, + // like an overly-complicated alias + Lenses: []model.LensModule{ + { + Path: lenses.CopyModulePath, + Arguments: map[string]any{ + "src": "firstName", + "dst": "fullName", + }, + }, + }, + }), + }, + testUtils.PatchCollection{ + Patch: fmt.Sprintf(` + [ + { "op": "replace", "path": "/2/Sources/0/Transform", "value": %s } + ] + `, + newTransformCfgJson, + ), + }, + testUtils.CreateDoc{ + // Set the `firstName` and `lastName` fields + Doc: `{ + "firstName": "John", + "lastName": "S" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + fullName + } + } + `, + Results: []map[string]any{ + { + "fullName": "S", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/sources_test.go b/tests/integration/collection_description/updates/replace/sources_test.go index 2d06e01d4a..2f6bf7ca69 100644 --- a/tests/integration/collection_description/updates/replace/sources_test.go +++ b/tests/integration/collection_description/updates/replace/sources_test.go @@ -30,7 +30,29 @@ func TestColDescrUpdateReplaceSources_Errors(t *testing.T) { { "op": "replace", "path": "/1/Sources", "value": [{"SourceCollectionID": 1}] } ] `, - ExpectedError: "collection sources cannot be mutated. CollectionID: 1", + ExpectedError: "collection sources cannot be added or removed. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceSourcesWithQuerySource_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Sources", "value": [{"Query": {"Name": "Users"}}] } + ] + `, + ExpectedError: "collection sources cannot be added or removed. CollectionID: 1", + }, + }, + } From f253317fc57ccbd4a53d00b6ddd217cb0cebd895 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 21 Mar 2024 19:12:10 -0400 Subject: [PATCH 11/49] refactor: Rewrite convertImmutable (#2445) ## Relevant issue(s) Resolves #2444 ## Description Rewrites `convertImmutable` to preallocate its output slice instead of growing it with repeated appends.
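For context, a minimal sketch of the behaviour the rewrite preserves (the helper mirrors the new implementation in `client/value.go` shown in the diff below; the example values are illustrative only): `None` options still map to `nil` entries in the output, only the allocation strategy changes.

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/immutable"
)

// convertImmutable mirrors the rewritten implementation: it preallocates
// the output slice and leaves nil entries for None options, rather than
// growing the slice with repeated appends.
func convertImmutable[T any](vals []immutable.Option[T]) []any {
	out := make([]any, len(vals))
	for i := range vals {
		if vals[i].HasValue() {
			out[i] = vals[i].Value()
		}
	}
	return out
}

func main() {
	vals := []immutable.Option[int]{
		immutable.Some(1),
		immutable.None[int](),
		immutable.Some(3),
	}
	fmt.Println(convertImmutable(vals)) // prints: [1 <nil> 3]
}
```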
--- client/value.go | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/client/value.go b/client/value.go index 261535d8d2..a6719a8479 100644 --- a/client/value.go +++ b/client/value.go @@ -79,13 +79,11 @@ func (val FieldValue) Bytes() ([]byte, error) { } func convertImmutable[T any](vals []immutable.Option[T]) []any { - var out []any - for _, val := range vals { - if !val.HasValue() { - out = append(out, nil) - continue + out := make([]any, len(vals)) + for i := range vals { + if vals[i].HasValue() { + out[i] = vals[i].Value() } - out = append(out, val.Value()) } return out } From 89caf6df70ba709916109cf769a6353dbef982ac Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 22 Mar 2024 13:58:12 -0400 Subject: [PATCH 12/49] refactor: Clean up client/request package (#2443) ## Relevant issue(s) Resolves #2425 ## Description Cleans up the client/request package, removing some redundant items and documenting most of the remaining types. --- client/request/aggregate.go | 37 ++++++-- client/request/cid.go | 25 ++++++ client/request/commit.go | 43 +++++---- client/request/doc_ids.go | 21 +++++ client/request/field.go | 8 +- client/request/filter.go | 10 +++ client/request/group.go | 16 ++++ client/request/limit.go | 20 +++++ client/request/mutation.go | 29 +++--- client/request/offset.go | 22 +++++ client/request/order.go | 10 +++ client/request/select.go | 112 +++++++++++++---------- client/request/subscription.go | 19 ++-- db/collection_update.go | 8 +- planner/mapper/mapper.go | 118 ++++++++++++++++++------- planner/planner.go | 2 +- planner/view.go | 2 +- request/graphql/parser/commit.go | 4 +- request/graphql/parser/mutation.go | 6 +- request/graphql/parser/query.go | 28 +++--- request/graphql/parser/request.go | 9 +- request/graphql/parser/subscription.go | 2 +- 22 files changed, 400 insertions(+), 151 deletions(-) create mode 100644 client/request/cid.go create mode 100644 client/request/doc_ids.go create mode 100644 client/request/limit.go create mode 100644 client/request/offset.go diff --git a/client/request/aggregate.go b/client/request/aggregate.go index 902134b258..fa7188977e 100644 --- a/client/request/aggregate.go +++ b/client/request/aggregate.go @@ -10,20 +10,43 @@ package request -import immutables "github.com/sourcenetwork/immutable" +import "github.com/sourcenetwork/immutable" +// Aggregate represents an aggregate operation upon a set of child properties. +// +// Which aggregate this represents (e.g. _count, _avg, etc.) is determined by its +// [Name] property. type Aggregate struct { Field + // Targets hosts the properties to aggregate. + // + // When multiple properties are selected, their values will be gathered into a single set + // upon which the aggregate will be performed. For example, if this aggregate represents + // an average of the Friends.Age and Parents.Age fields, the result will be the average + // age of all their friends and parents; it will not be an average of their average ages. Targets []*AggregateTarget } +// AggregateTarget represents the target of an [Aggregate]. type AggregateTarget struct { Limitable Offsetable Orderable Filterable + // HostName is the name of the immediate field on the object hosting the aggregate. + // + // For example, if averaging Friends.Age on the User collection, this property would be + // "Friends".
+ HostName string - // ChildName is the name of the child field on the object navigated to via [HostName]. + // + // It is optional; for example, when counting the number of Friends on User, or when aggregating + // scalar arrays, this value will be None. + // + // When averaging Friends.Age on the User collection, this property would be + // "Age". + ChildName immutable.Option[string] - Limit immutables.Option[uint64] - Offset immutables.Option[uint64] - OrderBy immutables.Option[OrderBy] - Filter immutables.Option[Filter] } diff --git a/client/request/cid.go b/client/request/cid.go new file mode 100644 index 0000000000..42707d0247 --- /dev/null +++ b/client/request/cid.go @@ -0,0 +1,25 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// CIDFilter is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request by commit CID. +type CIDFilter struct { + // CID is an optional value that selects a single document at the given commit CID + // for processing by the request. + // + // If a commit matching the given CID is not found, an error will be returned. The commit + // does not need to be the latest, and this property allows viewing of the document at + // prior revisions. + CID immutable.Option[string] +} diff --git a/client/request/commit.go b/client/request/commit.go index ff65e20822..e44dabf794 100644 --- a/client/request/commit.go +++ b/client/request/commit.go @@ -16,20 +16,34 @@ var ( _ Selection = (*CommitSelect)(nil) ) +// CommitSelect represents the selection of database commits made to Defra documents. type CommitSelect struct { Field + ChildSelect - DocID immutable.Option[string] - FieldID immutable.Option[string] - Cid immutable.Option[string] - Depth immutable.Option[uint64] + CIDFilter + + Limitable + Offsetable + Orderable + Groupable - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] + // DocID is an optional filter which, when provided, will limit commits to those + // belonging to the given document. + DocID immutable.Option[string] + + // FieldID is an optional filter which, when provided, will limit commits to those + // belonging to the given field. + // + // `C` may be provided for document-level (composite) commits. FieldID immutable.Option[string] - Fields []Selection + // Depth limits the returned commits to being X places in the history away from the + // most current. + // + // For example, if a document has been updated 5 times and a depth of 2 is provided, + // only commits for the last two updates will be returned.
+ Depth immutable.Option[uint64] } func (c CommitSelect) ToSelect() *Select { @@ -38,11 +52,10 @@ Name: c.Name, Alias: c.Alias, }, - Limit: c.Limit, - Offset: c.Offset, - OrderBy: c.OrderBy, - GroupBy: c.GroupBy, - Fields: c.Fields, - Root: CommitSelection, + Limitable: c.Limitable, + Offsetable: c.Offsetable, + Orderable: c.Orderable, + Groupable: c.Groupable, + ChildSelect: c.ChildSelect, } } diff --git a/client/request/doc_ids.go b/client/request/doc_ids.go new file mode 100644 index 0000000000..24089d2032 --- /dev/null +++ b/client/request/doc_ids.go @@ -0,0 +1,21 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// DocIDsFilter is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request by document IDs. +type DocIDsFilter struct { + // DocIDs is an optional value that ensures any records processed by the request + // will have one of the given document IDs. + DocIDs immutable.Option[[]string] +} diff --git a/client/request/field.go b/client/request/field.go index 578074671b..636a0d97e8 100644 --- a/client/request/field.go +++ b/client/request/field.go @@ -14,6 +14,12 @@ import "github.com/sourcenetwork/immutable" // Field implements Selection type Field struct { - Name string + // Name contains the name of the field on its host object. + // + // For example `email` on a `User` collection, or a `_count` aggregate. + Name string + + // Alias is an optional override for Name; if provided, results will be returned + // from the query using the Alias instead of the Name. Alias immutable.Option[string] } diff --git a/client/request/filter.go b/client/request/filter.go index 67a80b58e7..aabfafb9b9 100644 --- a/client/request/filter.go +++ b/client/request/filter.go @@ -10,6 +10,8 @@ package request +import "github.com/sourcenetwork/immutable" + const ( FilterOpOr = "_or" FilterOpAnd = "_and" @@ -24,3 +26,11 @@ type Filter struct { // parsed filter conditions Conditions map[string]any } + +// Filterable is an embeddable struct that hosts a consistent set of properties +// for filtering an aspect of a request. +type Filterable struct { + // Filter is an optional set of conditions used to filter records prior to + // being processed by the request. + Filter immutable.Option[Filter] +} diff --git a/client/request/group.go b/client/request/group.go index e2fd977a00..b38186cb3a 100644 --- a/client/request/group.go +++ b/client/request/group.go @@ -10,6 +10,22 @@ package request +import "github.com/sourcenetwork/immutable" + type GroupBy struct { Fields []string } + +// Groupable is an embeddable struct that hosts a consistent set of properties +// for grouping an aspect of a request. +type Groupable struct { + // GroupBy is an optional set of fields by which to group the contents of this + // request. + // + // If this argument is provided, only fields used to group may be rendered in + // the immediate child selector. Additional fields may be selected by using + // the '_group' selector within the immediate child selector.
If an empty set + // is provided, the restrictions mentioned still apply, although all results + // will appear within the same group. + GroupBy immutable.Option[GroupBy] +} diff --git a/client/request/limit.go b/client/request/limit.go new file mode 100644 index 0000000000..2e1b1a4ab7 --- /dev/null +++ b/client/request/limit.go @@ -0,0 +1,20 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// Limitable is an embeddable struct that hosts a consistent set of properties +// for limiting an aspect of a request. +type Limitable struct { + // Limit is an optional value that caps the number of results to the number provided. + Limit immutable.Option[uint64] +} diff --git a/client/request/mutation.go b/client/request/mutation.go index 6bff180dd9..81fcc823c9 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -10,8 +10,6 @@ package request -import "github.com/sourcenetwork/immutable" - type MutationType int const ( @@ -25,17 +23,24 @@ const ( // all the possible arguments. type ObjectMutation struct { Field + ChildSelect + + Filterable + DocIDsFilter + + // Type is the type of mutation that this object represents. + // + // For example [CreateObjects]. Type MutationType - // Collection is the target collection name - // if this mutation is on an object. + // Collection is the target collection name. Collection string - IDs immutable.Option[[]string] - Filter immutable.Option[Filter] - Input map[string]any - - Fields []Selection + // Input is the JSON representation of the fieldName-value pairs of document properties + // to mutate. + // + // This is ignored for [DeleteObjects] mutations. + Input map[string]any } // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as @@ -46,8 +51,8 @@ func (m ObjectMutation) ToSelect() *Select { Name: m.Collection, Alias: m.Alias, }, - Fields: m.Fields, - DocIDs: m.IDs, - Filter: m.Filter, + ChildSelect: m.ChildSelect, + DocIDsFilter: m.DocIDsFilter, + Filterable: m.Filterable, } } diff --git a/client/request/offset.go b/client/request/offset.go new file mode 100644 index 0000000000..5bb2ea723d --- /dev/null +++ b/client/request/offset.go @@ -0,0 +1,22 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package request + +import "github.com/sourcenetwork/immutable" + +// Offsetable is an embeddable struct that hosts a consistent set of properties +// for offsetting an aspect of a request. +type Offsetable struct { + // Offset is an optional value that skips the given number of results that would have + // otherwise been returned. Commonly used alongside the limit argument, + // although it will still work on its own.
+ Offset immutable.Option[uint64] +} diff --git a/client/request/order.go b/client/request/order.go index 1fff3953f1..d998843959 100644 --- a/client/request/order.go +++ b/client/request/order.go @@ -10,6 +10,8 @@ package request +import "github.com/sourcenetwork/immutable" + type ( OrderDirection string @@ -29,3 +31,11 @@ type ( Conditions []OrderCondition } ) + +// Orderable is an embeddable struct that hosts a consistent set of properties +// for ordering an aspect of a request. +type Orderable struct { + // OrderBy is an optional set of field-orders which may be used to sort the results. An + // empty set will be ignored. + OrderBy immutable.Option[OrderBy] +} diff --git a/client/request/select.go b/client/request/select.go index 863bba2aeb..0365fb385b 100644 --- a/client/request/select.go +++ b/client/request/select.go @@ -12,16 +12,6 @@ package request import ( "encoding/json" - - "github.com/sourcenetwork/immutable" -) - -// SelectionType is the type of selection. -type SelectionType int - -const ( - ObjectSelection SelectionType = iota - CommitSelection ) // Select is a complex Field with strong typing. @@ -29,22 +19,29 @@ const ( // Includes fields, and request arguments like filters, limits, etc. type Select struct { Field + ChildSelect + + Limitable + Offsetable + Orderable + Filterable + DocIDsFilter + CIDFilter + Groupable + + // ShowDeleted will return deleted documents along with non-deleted ones + // if set to true. + ShowDeleted bool +} - DocIDs immutable.Option[[]string] - CID immutable.Option[string] - - // Root is the top level type of parsed request - Root SelectionType - - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] - Filter immutable.Option[Filter] - +// ChildSelect represents a type with selectable child properties. +// +// At least one child must be selected. +type ChildSelect struct { + // Fields contains the set of child properties to return. + // + // At least one child property must be selected. Fields []Selection - - ShowDeleted bool } // Validate validates the Select. @@ -111,25 +108,20 @@ func (s *Select) validateGroupBy() []error { } // selectJson is a private object used for handling json deserialization -// of `Select` objects. +// of [Select] objects. +// +// It contains everything minus the [ChildSelect], which uses a custom UnmarshalJSON +// and is skipped over when embedding due to the way the std lib json pkg works. type selectJson struct { Field - DocIDs immutable.Option[[]string] - CID immutable.Option[string] - Root SelectionType - Limit immutable.Option[uint64] - Offset immutable.Option[uint64] - OrderBy immutable.Option[OrderBy] - GroupBy immutable.Option[GroupBy] - Filter immutable.Option[Filter] + Limitable + Offsetable + Orderable + Filterable + DocIDsFilter + CIDFilter + Groupable ShowDeleted bool - - // Properties above this line match the `Select` object and - // are deserialized using the normal/default logic. - // Properties below this line require custom logic in `UnmarshalJSON` - // in order to be deserialized correctly. 
- - Fields []map[string]json.RawMessage } func (s *Select) UnmarshalJSON(bytes []byte) error { @@ -142,13 +134,37 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { s.Field = selectMap.Field s.DocIDs = selectMap.DocIDs s.CID = selectMap.CID - s.Root = selectMap.Root - s.Limit = selectMap.Limit - s.Offset = selectMap.Offset - s.OrderBy = selectMap.OrderBy - s.GroupBy = selectMap.GroupBy - s.Filter = selectMap.Filter + s.Limitable = selectMap.Limitable + s.Offsetable = selectMap.Offsetable + s.Orderable = selectMap.Orderable + s.Groupable = selectMap.Groupable + s.Filterable = selectMap.Filterable s.ShowDeleted = selectMap.ShowDeleted + + var childSelect ChildSelect + err = json.Unmarshal(bytes, &childSelect) + if err != nil { + return err + } + + s.ChildSelect = childSelect + + return nil +} + +// childSelectJson is a private object used for handling json deserialization +// of [ChildSelect] objects. +type childSelectJson struct { + Fields []map[string]json.RawMessage +} + +func (s *ChildSelect) UnmarshalJSON(bytes []byte) error { + var selectMap childSelectJson + err := json.Unmarshal(bytes, &selectMap) + if err != nil { + return err + } + s.Fields = make([]Selection, len(selectMap.Fields)) for i, field := range selectMap.Fields { @@ -163,8 +179,8 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { // They must be non-nillable as nil values may have their keys omitted from // the json. This also relies on the fields being unique. We may wish to change // this later to custom-serialize with a `_type` property. - if _, ok := field["Root"]; ok { - // This must be a Select, as only the `Select` type has a `Root` field + if _, ok := field["Fields"]; ok { + // This must be a Select, as only the `Select` type has a `Fields` field var fieldSelect Select err := json.Unmarshal(fieldJson, &fieldSelect) if err != nil { diff --git a/client/request/subscription.go b/client/request/subscription.go index bb4e01156c..08276e7ef7 100644 --- a/client/request/subscription.go +++ b/client/request/subscription.go @@ -19,13 +19,12 @@ import ( // arguments type ObjectSubscription struct { Field + ChildSelect + + Filterable // Collection is the target collection name Collection string - - Filter immutable.Option[Filter] - - Fields []Selection } // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as @@ -36,9 +35,13 @@ func (m ObjectSubscription) ToSelect(docID, cid string) *Select { Name: m.Collection, Alias: m.Alias, }, - DocIDs: immutable.Some([]string{docID}), - CID: immutable.Some(cid), - Fields: m.Fields, - Filter: m.Filter, + DocIDsFilter: DocIDsFilter{ + DocIDs: immutable.Some([]string{docID}), + }, + CIDFilter: CIDFilter{ + immutable.Some(cid), + }, + ChildSelect: m.ChildSelect, + Filterable: m.Filterable, } } diff --git a/db/collection_update.go b/db/collection_update.go index 496d8bf81c..a4a4dc3f4d 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -434,8 +434,12 @@ func (c *collection) makeSelectLocal(filter immutable.Option[request.Filter]) (* Field: request.Field{ Name: c.Name().Value(), }, - Filter: filter, - Fields: make([]request.Selection, 0), + Filterable: request.Filterable{ + Filter: filter, + }, + ChildSelect: request.ChildSelect{ + Fields: make([]request.Selection, 0), + }, } for _, fd := range c.Schema().Fields { diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index b48fbb6f9c..17b7f86611 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -33,13 +33,26 @@ var ( FilterEqOp = &Operator{Operation: 
"_eq"} ) +// SelectionType is the type of selection. +type SelectionType int + +const ( + ObjectSelection SelectionType = iota + CommitSelection +) + // ToSelect converts the given [parser.Select] into a [Select]. // // In the process of doing so it will construct the document map required to access the data // yielded by the [Select]. -func ToSelect(ctx context.Context, store client.Store, selectRequest *request.Select) (*Select, error) { +func ToSelect( + ctx context.Context, + store client.Store, + rootSelectType SelectionType, + selectRequest *request.Select, +) (*Select, error) { // the top-level select will always have index=0, and no parent collection name - return toSelect(ctx, store, 0, selectRequest, "") + return toSelect(ctx, store, rootSelectType, 0, selectRequest, "") } // toSelect converts the given [parser.Select] into a [Select]. @@ -49,28 +62,35 @@ func ToSelect(ctx context.Context, store client.Store, selectRequest *request.Se func toSelect( ctx context.Context, store client.Store, + rootSelectType SelectionType, thisIndex int, selectRequest *request.Select, parentCollectionName string, ) (*Select, error) { - collectionName, err := getCollectionName(ctx, store, selectRequest, parentCollectionName) + if rootSelectType == ObjectSelection && selectRequest.Name == request.VersionFieldName { + // WARNING: This is a weird quirk upon which some of the mapper code is dependent upon + // please remove it if/when you have chance to. + rootSelectType = CommitSelection + } + + collectionName, err := getCollectionName(ctx, store, rootSelectType, selectRequest, parentCollectionName) if err != nil { return nil, err } - mapping, definition, err := getTopLevelInfo(ctx, store, selectRequest, collectionName) + mapping, definition, err := getTopLevelInfo(ctx, store, rootSelectType, selectRequest, collectionName) if err != nil { return nil, err } - fields, aggregates, err := getRequestables(ctx, selectRequest, mapping, collectionName, store) + fields, aggregates, err := getRequestables(ctx, rootSelectType, selectRequest, mapping, collectionName, store) if err != nil { return nil, err } // Needs to be done before resolving aggregates, else filter conversion may fail there filterDependencies, err := resolveFilterDependencies( - ctx, store, collectionName, selectRequest.Filter, mapping, fields) + ctx, store, rootSelectType, collectionName, selectRequest.Filter, mapping, fields) if err != nil { return nil, err } @@ -78,7 +98,7 @@ func toSelect( // Resolve order dependencies that may have been missed due to not being rendered. 
err = resolveOrderDependencies( - ctx, store, collectionName, selectRequest.OrderBy, mapping, &fields) + ctx, store, rootSelectType, collectionName, selectRequest.OrderBy, mapping, &fields) if err != nil { return nil, err } @@ -86,7 +106,7 @@ func toSelect( aggregates = appendUnderlyingAggregates(aggregates, mapping) fields, err = resolveAggregates( ctx, - selectRequest, + rootSelectType, aggregates, fields, mapping, @@ -100,7 +120,15 @@ func toSelect( } if len(definition.Schema.Fields) != 0 { - fields, err = resolveSecondaryRelationIDs(ctx, store, collectionName, definition.Schema, mapping, fields) + fields, err = resolveSecondaryRelationIDs( + ctx, + store, + rootSelectType, + collectionName, + definition.Schema, + mapping, + fields, + ) if err != nil { return nil, err } @@ -146,6 +174,7 @@ func toSelect( func resolveOrderDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, descName string, source immutable.Option[request.OrderBy], mapping *core.DocumentMapping, @@ -170,7 +199,15 @@ outer: joinField := fields[0] // ensure the child select is resolved for this order join - innerSelect, err := resolveChildOrder(ctx, store, descName, joinField, mapping, currentExistingFields) + innerSelect, err := resolveChildOrder( + ctx, + store, + rootSelectType, + descName, + joinField, + mapping, + currentExistingFields, + ) if err != nil { return err } @@ -188,7 +225,7 @@ outer: joinField := fields[0] // ensure the child select is resolved for this order join - innerSelect, err := resolveChildOrder(ctx, store, descName, joinField, mapping, existingFields) + innerSelect, err := resolveChildOrder(ctx, store, rootSelectType, descName, joinField, mapping, existingFields) if err != nil { return err } @@ -215,6 +252,7 @@ outer: func resolveChildOrder( ctx context.Context, store client.Store, + rootSelectType SelectionType, descName string, orderChildField string, mapping *core.DocumentMapping, @@ -232,7 +270,7 @@ func resolveChildOrder( Name: orderChildField, }, } - innerSelect, err := toSelect(ctx, store, index, &dummyJoinFieldSelect, descName) + innerSelect, err := toSelect(ctx, store, rootSelectType, index, &dummyJoinFieldSelect, descName) if err != nil { return nil, err } @@ -262,7 +300,7 @@ func resolveChildOrder( // updated with any new fields/aggregates. 
func resolveAggregates( ctx context.Context, - selectRequest *request.Select, + rootSelectType SelectionType, aggregates []*aggregateRequest, inputFields []Requestable, mapping *core.DocumentMapping, @@ -334,7 +372,6 @@ func resolveAggregates( index := mapping.GetNextIndex() hostSelectRequest := &request.Select{ - Root: selectRequest.Root, Field: request.Field{ Name: target.hostExternalName, }, @@ -344,24 +381,31 @@ func resolveAggregates( collectionName = "" } - childCollectionName, err := getCollectionName(ctx, store, hostSelectRequest, collectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, hostSelectRequest, collectionName) if err != nil { return nil, err } - mapAggregateNestedTargets(target, hostSelectRequest, selectRequest.Root) + mapAggregateNestedTargets(target, hostSelectRequest) - childMapping, _, err := getTopLevelInfo(ctx, store, hostSelectRequest, childCollectionName) + childMapping, _, err := getTopLevelInfo(ctx, store, rootSelectType, hostSelectRequest, childCollectionName) if err != nil { return nil, err } - childFields, _, err := getRequestables(ctx, hostSelectRequest, childMapping, childCollectionName, store) + childFields, _, err := getRequestables( + ctx, + rootSelectType, + hostSelectRequest, + childMapping, + childCollectionName, + store, + ) if err != nil { return nil, err } err = resolveOrderDependencies( - ctx, store, childCollectionName, target.order, childMapping, &childFields) + ctx, store, rootSelectType, childCollectionName, target.order, childMapping, &childFields) if err != nil { return nil, err } @@ -373,6 +417,7 @@ func resolveAggregates( filterDependencies, err := resolveFilterDependencies( ctx, store, + rootSelectType, childCollectionName, target.filter, mapping.ChildMappings[index], @@ -481,13 +526,11 @@ func resolveAggregates( func mapAggregateNestedTargets( target *aggregateRequestTarget, hostSelectRequest *request.Select, - selectionType request.SelectionType, ) { if target.order.HasValue() { for _, cond := range target.order.Value().Conditions { if len(cond.Fields) > 1 { hostSelectRequest.Fields = append(hostSelectRequest.Fields, &request.Select{ - Root: selectionType, Field: request.Field{ Name: cond.Fields[0], }, @@ -503,7 +546,6 @@ func mapAggregateNestedTargets( for _, innerCond := range cond { if _, isMap := innerCond.(map[string]any); isMap { hostSelectRequest.Fields = append(hostSelectRequest.Fields, &request.Select{ - Root: selectionType, Field: request.Field{ Name: topKey, }, @@ -619,6 +661,7 @@ func appendIfNotExists( // consumed mapping data. 
func getRequestables( ctx context.Context, + rootSelectType SelectionType, selectRequest *request.Select, mapping *core.DocumentMapping, collectionName string, @@ -644,7 +687,7 @@ func getRequestables( case *request.Select: index := mapping.GetNextIndex() - innerSelect, err := toSelect(ctx, store, index, f, collectionName) + innerSelect, err := toSelect(ctx, store, rootSelectType, index, f, collectionName) if err != nil { return nil, nil, err } @@ -710,6 +753,7 @@ func getAggregateRequests(index int, aggregate *request.Aggregate) (aggregateReq func getCollectionName( ctx context.Context, store client.Store, + rootSelectType SelectionType, selectRequest *request.Select, parentCollectionName string, ) (string, error) { @@ -719,7 +763,7 @@ func getCollectionName( if selectRequest.Name == request.GroupFieldName { return parentCollectionName, nil - } else if selectRequest.Root == request.CommitSelection { + } else if rootSelectType == CommitSelection { return parentCollectionName, nil } @@ -744,6 +788,7 @@ func getCollectionName( func getTopLevelInfo( ctx context.Context, store client.Store, + rootSelectType SelectionType, selectRequest *request.Select, collectionName string, ) (*core.DocumentMapping, client.CollectionDefinition, error) { @@ -755,7 +800,7 @@ func getTopLevelInfo( return mapping, client.CollectionDefinition{}, nil } - if selectRequest.Root == request.ObjectSelection { + if rootSelectType == ObjectSelection { var definition client.CollectionDefinition collection, err := store.GetCollectionByName(ctx, collectionName) if err != nil { @@ -834,6 +879,7 @@ func getTopLevelInfo( func resolveFilterDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, source immutable.Option[request.Filter], mapping *core.DocumentMapping, @@ -846,6 +892,7 @@ func resolveFilterDependencies( return resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, source.Value().Conditions, mapping, @@ -857,6 +904,7 @@ func resolveFilterDependencies( func resolveInnerFilterDependencies( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, source map[string]any, mapping *core.DocumentMapping, @@ -872,6 +920,7 @@ func resolveInnerFilterDependencies( innerFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, innerFilter.(map[string]any), mapping, @@ -891,6 +940,7 @@ func resolveInnerFilterDependencies( innerFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, parentCollectionName, notFilter, mapping, @@ -934,7 +984,7 @@ func resolveInnerFilterDependencies( } } else { var err error - childSelect, err = constructEmptyJoin(ctx, store, parentCollectionName, mapping, key) + childSelect, err = constructEmptyJoin(ctx, store, rootSelectType, parentCollectionName, mapping, key) if err != nil { return nil, err } @@ -951,7 +1001,7 @@ func resolveInnerFilterDependencies( } dummyParsed := &request.Select{Field: request.Field{Name: key}} - childCollectionName, err := getCollectionName(ctx, store, dummyParsed, parentCollectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, dummyParsed, parentCollectionName) if err != nil { return nil, err } @@ -959,6 +1009,7 @@ func resolveInnerFilterDependencies( childFields, err := resolveInnerFilterDependencies( ctx, store, + rootSelectType, childCollectionName, childFilter, childSelect.DocumentMapping, @@ -979,6 +1030,7 @@ func resolveInnerFilterDependencies( 
func constructEmptyJoin( ctx context.Context, store client.Store, + rootSelectType SelectionType, parentCollectionName string, parentMapping *core.DocumentMapping, name string, @@ -991,12 +1043,12 @@ func constructEmptyJoin( }, } - childCollectionName, err := getCollectionName(ctx, store, dummyParsed, parentCollectionName) + childCollectionName, err := getCollectionName(ctx, store, rootSelectType, dummyParsed, parentCollectionName) if err != nil { return nil, err } - childMapping, _, err := getTopLevelInfo(ctx, store, dummyParsed, childCollectionName) + childMapping, _, err := getTopLevelInfo(ctx, store, rootSelectType, dummyParsed, childCollectionName) if err != nil { return nil, err } @@ -1025,6 +1077,7 @@ func constructEmptyJoin( func resolveSecondaryRelationIDs( ctx context.Context, store client.Store, + rootSelectType SelectionType, collectionName string, schema client.SchemaDescription, mapping *core.DocumentMapping, @@ -1064,6 +1117,7 @@ func resolveSecondaryRelationIDs( join, err := constructEmptyJoin( ctx, store, + rootSelectType, collectionName, mapping, objectFieldName, @@ -1088,7 +1142,7 @@ func ToCommitSelect( store client.Store, selectRequest *request.CommitSelect, ) (*CommitSelect, error) { - underlyingSelect, err := ToSelect(ctx, store, selectRequest.ToSelect()) + underlyingSelect, err := ToSelect(ctx, store, CommitSelection, selectRequest.ToSelect()) if err != nil { return nil, err } @@ -1097,7 +1151,7 @@ func ToCommitSelect( DocID: selectRequest.DocID, FieldID: selectRequest.FieldID, Depth: selectRequest.Depth, - Cid: selectRequest.Cid, + Cid: selectRequest.CID, }, nil } @@ -1106,7 +1160,7 @@ func ToCommitSelect( // In the process of doing so it will construct the document map required to access the data // yielded by the [Select] embedded in the [Mutation]. 
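+// Mutations always target objects, so ObjectSelection is passed through to [ToSelect] +// here, mirroring how [ToCommitSelect] above passes CommitSelection.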
func ToMutation(ctx context.Context, store client.Store, mutationRequest *request.ObjectMutation) (*Mutation, error) { - underlyingSelect, err := ToSelect(ctx, store, mutationRequest.ToSelect()) + underlyingSelect, err := ToSelect(ctx, store, ObjectSelection, mutationRequest.ToSelect()) if err != nil { return nil, err } diff --git a/planner/planner.go b/planner/planner.go index 0629076924..b2d9bc47a9 100644 --- a/planner/planner.go +++ b/planner/planner.go @@ -114,7 +114,7 @@ func (p *Planner) newPlan(stmt any) (planNode, error) { return p.newPlan(n.Selections[0]) case *request.Select: - m, err := mapper.ToSelect(p.ctx, p.db, n) + m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, n) if err != nil { return nil, err } diff --git a/planner/view.go b/planner/view.go index f02de06d27..2bb5f94fa8 100644 --- a/planner/view.go +++ b/planner/view.go @@ -33,7 +33,7 @@ func (p *Planner) View(query *mapper.Select, col client.Collection) (planNode, e querySource := (col.Description().Sources[0].(*client.QuerySource)) hasTransform := querySource.Transform.HasValue() - m, err := mapper.ToSelect(p.ctx, p.db, &querySource.Query) + m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, &querySource.Query) if err != nil { return nil, err } diff --git a/request/graphql/parser/commit.go b/request/graphql/parser/commit.go index e4d4c01903..a6b468fc35 100644 --- a/request/graphql/parser/commit.go +++ b/request/graphql/parser/commit.go @@ -36,7 +36,7 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) commit.DocID = immutable.Some(raw.Value) } else if prop == request.Cid { raw := argument.Value.(*ast.StringValue) - commit.Cid = immutable.Some(raw.Value) + commit.CID = immutable.Some(raw.Value) } else if prop == request.FieldIDName { raw := argument.Value.(*ast.StringValue) commit.FieldID = immutable.Some(raw.Value) @@ -112,7 +112,7 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) return nil, err } - commit.Fields, err = parseSelectFields(schema, request.CommitSelection, fieldObject, field.SelectionSet) + commit.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return commit, err } diff --git a/request/graphql/parser/mutation.go b/request/graphql/parser/mutation.go index 27becabb71..92071b6e93 100644 --- a/request/graphql/parser/mutation.go +++ b/request/graphql/parser/mutation.go @@ -116,7 +116,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re mut.Filter = filter } else if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) - mut.IDs = immutable.Some([]string{raw.Value}) + mut.DocIDs = immutable.Some([]string{raw.Value}) } else if prop == request.DocIDsArgName { raw := argument.Value.(*ast.ListValue) ids := make([]string, len(raw.Values)) @@ -127,7 +127,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re } ids[i] = id.Value } - mut.IDs = immutable.Some(ids) + mut.DocIDs = immutable.Some(ids) } } @@ -141,7 +141,7 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re return nil, err } - mut.Fields, err = parseSelectFields(schema, request.ObjectSelection, fieldObject, field.SelectionSet) + mut.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return mut, err } diff --git a/request/graphql/parser/query.go b/request/graphql/parser/query.go index 3213c7489a..48fde3db1f 100644 --- a/request/graphql/parser/query.go +++ b/request/graphql/parser/query.go @@ -55,14 +55,16 @@ 
func parseQueryOperationDefinition( Name: parsed.Name, Alias: parsed.Alias, }, - Fields: []request.Selection{ - parsed, + ChildSelect: request.ChildSelect{ + Fields: []request.Selection{ + parsed, + }, }, } } else { // the query doesn't match a reserved name // so it's probably a generated query - parsed, err := parseSelect(schema, request.ObjectSelection, schema.QueryType(), node, i) + parsed, err := parseSelect(schema, schema.QueryType(), node, i) if err != nil { return nil, []error{err} } @@ -90,7 +92,6 @@ func parseQueryOperationDefinition( // filters, limits, orders, etc. func parseSelect( schema gql.Schema, - rootType request.SelectionType, parent *gql.Object, field *ast.Field, index int, ) (*request.Select, error) { @@ -100,7 +101,6 @@ func parseSelect( Name: field.Name.Value, Alias: getFieldAlias(field), }, - Root: rootType, } fieldDef := gql.GetFieldDef(schema, parent, slct.Name) @@ -191,7 +191,7 @@ func parseSelect( return nil, err } - slct.Fields, err = parseSelectFields(schema, slct.Root, fieldObject, field.SelectionSet) + slct.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) if err != nil { return nil, err } @@ -306,10 +306,18 @@ func parseAggregate(schema gql.Schema, parent *gql.Object, field *ast.Field, ind targets[i] = &request.AggregateTarget{ HostName: hostName, ChildName: immutable.Some(childName), - Filter: filter, - Limit: limit, - Offset: offset, - OrderBy: order, + Filterable: request.Filterable{ + Filter: filter, + }, + Limitable: request.Limitable{ + Limit: limit, + }, + Offsetable: request.Offsetable{ + Offset: offset, + }, + Orderable: request.Orderable{ + OrderBy: order, + }, } } } diff --git a/request/graphql/parser/request.go b/request/graphql/parser/request.go index 69d275de03..f0a73a4667 100644 --- a/request/graphql/parser/request.go +++ b/request/graphql/parser/request.go @@ -162,7 +162,6 @@ func getFieldAlias(field *ast.Field) immutable.Option[string] { func parseSelectFields( schema gql.Schema, - root request.SelectionType, parent *gql.Object, fields *ast.SelectionSet) ([]request.Selection, error) { selections := make([]request.Selection, len(fields.Selections)) @@ -179,13 +178,7 @@ } else if node.SelectionSet == nil { // regular field selections[i] = parseField(node) } else { // sub type with extra fields - subroot := root - switch node.Name.Value { - case request.VersionFieldName: - subroot = request.CommitSelection - } - - s, err := parseSelect(schema, subroot, parent, node, i) + s, err := parseSelect(schema, parent, node, i) if err != nil { return nil, err } diff --git a/request/graphql/parser/subscription.go b/request/graphql/parser/subscription.go index 354645beb4..0e6042f931 100644 --- a/request/graphql/parser/subscription.go +++ b/request/graphql/parser/subscription.go @@ -79,6 +79,6 @@ func parseSubscription(schema gql.Schema, field *ast.Field) (*request.ObjectSubs return nil, err } - sub.Fields, err = parseSelectFields(schema, request.ObjectSelection, fieldObject, field.SelectionSet) + sub.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) return sub, err } From a2591f81da240b40ffcf76d461a3a12248ca7eca Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Fri, 22 Mar 2024 16:06:36 -0400 Subject: [PATCH 13/49] ci(i): Disable windows build (#2450) ## Relevant issue(s) Resolves #2449 ## Description - Disable the windows build until wasmtime-go is fixed - To re-enable it, just uncomment the lines in: `.github/workflows/test-and-upload-coverage.yml` --- .github/workflows/test-and-upload-coverage.yml | 12 +++++++----- 1 
file changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index 60858b1f86..491b674906 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -46,11 +46,13 @@ jobs: database-type: badger-memory mutation-type: collection-save detect-changes: false - - os: windows-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - detect-changes: false +## TODO: https://github.com/sourcenetwork/defradb/issues/2080 +## Uncomment the lines below to re-enable the windows build once this TODO is resolved. +## - os: windows-latest +## client-type: go +## database-type: badger-memory +## mutation-type: collection-save +## detect-changes: false runs-on: ${{ matrix.os }} From a2c386319fda75c404488bbe5acdb1e52583ef3b Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Thu, 28 Mar 2024 15:40:38 +0100 Subject: [PATCH 14/49] refactor: Add NormalValue (#2404) ## Relevant issue(s) Resolves #2403 ## Description This change introduces the `NormalValue` type, which carries a standard (or normal) value that doesn't need to be type-asserted against all possible types. --- client/document.go | 112 +- client/errors.go | 23 + client/normal_array.go | 149 ++ client/normal_array_of_nillables.go | 142 ++ client/normal_new.go | 465 +++++ client/normal_nil.go | 48 + client/normal_nillable_array.go | 152 ++ client/normal_nillable_array_of_nillables.go | 160 ++ client/normal_nillable_scalar.go | 148 ++ client/normal_scalar.go | 130 ++ client/normal_util.go | 118 ++ client/normal_value.go | 207 +++ client/normal_value_test.go | 1624 +++++++++++++++++ client/normal_void.go | 205 +++ client/schema_field_description.go | 8 +- client/value.go | 31 +- core/encoding.go | 36 +- core/errors.go | 7 + core/key.go | 2 +- core/key_test.go | 35 +- db/fetcher/indexer.go | 11 +- db/fetcher/indexer_iterators.go | 242 ++- db/index.go | 20 +- db/indexed_docs_test.go | 64 +- encoding/field_value.go | 83 +- encoding/field_value_test.go | 36 +- .../query_with_index_only_field_order_test.go | 2 +- .../query_with_index_only_filter_test.go | 53 + ...uery_with_unique_index_only_filter_test.go | 111 ++ .../query/simple/with_filter/with_in_test.go | 41 + 30 files changed, 4154 insertions(+), 311 deletions(-) create mode 100644 client/normal_array.go create mode 100644 client/normal_array_of_nillables.go create mode 100644 client/normal_new.go create mode 100644 client/normal_nil.go create mode 100644 client/normal_nillable_array.go create mode 100644 client/normal_nillable_array_of_nillables.go create mode 100644 client/normal_nillable_scalar.go create mode 100644 client/normal_scalar.go create mode 100644 client/normal_util.go create mode 100644 client/normal_value.go create mode 100644 client/normal_value_test.go create mode 100644 client/normal_void.go diff --git a/client/document.go b/client/document.go index c2ca6c90a1..531ccd42cd 100644 --- a/client/document.go +++ b/client/document.go @@ -172,29 +172,17 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { return docs, nil } -// IsNillableKind returns true if the given FieldKind is nillable.
-func IsNillableKind(kind FieldKind) bool { - switch kind { - case FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB, FieldKind_NILLABLE_JSON, - FieldKind_NILLABLE_BOOL, FieldKind_NILLABLE_FLOAT, FieldKind_NILLABLE_DATETIME, - FieldKind_NILLABLE_INT: - return true - default: - return false - } -} - // validateFieldSchema takes a given value as an interface, // and ensures it matches the supplied field description. // It will do any minor parsing, like dates, and return // the typed value again as an interface. -func validateFieldSchema(val any, field SchemaFieldDescription) (any, error) { - if IsNillableKind(field.Kind) { +func validateFieldSchema(val any, field SchemaFieldDescription) (NormalValue, error) { + if field.Kind.IsNillable() { if val == nil { - return nil, nil + return NewNormalNil(field.Kind) } if v, ok := val.(*fastjson.Value); ok && v.Type() == fastjson.TypeNull { - return nil, nil + return NewNormalNil(field.Kind) } } @@ -203,51 +191,111 @@ func validateFieldSchema(val any, field SchemaFieldDescription) (any, error) { } if field.Kind.IsObject() { - return getString(val) + v, err := getString(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil } switch field.Kind { case FieldKind_DocID, FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB: - return getString(val) + v, err := getString(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil case FieldKind_STRING_ARRAY: - return getArray(val, getString) + v, err := getArray(val, getString) + if err != nil { + return nil, err + } + return NewNormalStringArray(v), nil case FieldKind_NILLABLE_STRING_ARRAY: - return getNillableArray(val, getString) + v, err := getNillableArray(val, getString) + if err != nil { + return nil, err + } + return NewNormalNillableStringArray(v), nil case FieldKind_NILLABLE_BOOL: - return getBool(val) + v, err := getBool(val) + if err != nil { + return nil, err + } + return NewNormalBool(v), nil case FieldKind_BOOL_ARRAY: - return getArray(val, getBool) + v, err := getArray(val, getBool) + if err != nil { + return nil, err + } + return NewNormalBoolArray(v), nil case FieldKind_NILLABLE_BOOL_ARRAY: - return getNillableArray(val, getBool) + v, err := getNillableArray(val, getBool) + if err != nil { + return nil, err + } + return NewNormalNillableBoolArray(v), nil case FieldKind_NILLABLE_FLOAT: - return getFloat64(val) + v, err := getFloat64(val) + if err != nil { + return nil, err + } + return NewNormalFloat(v), nil case FieldKind_FLOAT_ARRAY: - return getArray(val, getFloat64) + v, err := getArray(val, getFloat64) + if err != nil { + return nil, err + } + return NewNormalFloatArray(v), nil case FieldKind_NILLABLE_FLOAT_ARRAY: - return getNillableArray(val, getFloat64) + v, err := getNillableArray(val, getFloat64) + if err != nil { + return nil, err + } + return NewNormalNillableFloatArray(v), nil case FieldKind_NILLABLE_DATETIME: - return getDateTime(val) + v, err := getDateTime(val) + if err != nil { + return nil, err + } + return NewNormalTime(v), nil case FieldKind_NILLABLE_INT: - return getInt64(val) + v, err := getInt64(val) + if err != nil { + return nil, err + } + return NewNormalInt(v), nil case FieldKind_INT_ARRAY: - return getArray(val, getInt64) + v, err := getArray(val, getInt64) + if err != nil { + return nil, err + } + return NewNormalIntArray(v), nil case FieldKind_NILLABLE_INT_ARRAY: - return getNillableArray(val, getInt64) + v, err := getNillableArray(val, getInt64) + if err != nil { + return nil, err + } + return 
NewNormalNillableIntArray(v), nil case FieldKind_NILLABLE_JSON: - return getJSON(val) + v, err := getJSON(val) + if err != nil { + return nil, err + } + return NewNormalString(v), nil } return nil, NewErrUnhandledType("FieldKind", field.Kind) @@ -575,7 +623,7 @@ func (doc *Document) set(t CType, field string, value *FieldValue) error { return nil } -func (doc *Document) setCBOR(t CType, field string, val any) error { +func (doc *Document) setCBOR(t CType, field string, val NormalValue) error { value := NewFieldValue(t, val) return doc.set(t, field, value) } diff --git a/client/errors.go b/client/errors.go index c86ac274c7..71a111d431 100644 --- a/client/errors.go +++ b/client/errors.go @@ -31,6 +31,9 @@ const ( errFailedToUnmarshalCollection string = "failed to unmarshal collection json" errOperationNotPermittedOnNamelessCols string = "operation not permitted on nameless collection" errInvalidJSONPayload string = "invalid JSON payload" + errCanNotNormalizeValue string = "can not normalize value" + errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" + errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" ) // Errors returnable from this package. @@ -51,6 +54,9 @@ var ( ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") ErrInvalidDocIDVersion = errors.New("invalid document ID version") ErrInvalidJSONPayload = errors.New(errInvalidJSONPayload) + ErrCanNotNormalizeValue = errors.New(errCanNotNormalizeValue) + ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) + ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. @@ -75,6 +81,23 @@ func NewErrUnexpectedType[TExpected any](property string, actual any) error { ) } +// NewCanNotNormalizeValue returns an error indicating that the given value can not be normalized. +func NewCanNotNormalizeValue(val any) error { + return errors.New(errCanNotNormalizeValue, errors.NewKV("Value", val)) +} + +// NewCanNotTurnNormalValueIntoArray returns an error indicating that the given value can not be +// turned into an array. +func NewCanNotTurnNormalValueIntoArray(val any) error { + return errors.New(errCanNotTurnNormalValueIntoArray, errors.NewKV("Value", val)) +} + +// NewCanNotMakeNormalNilFromFieldKind returns an error indicating that a normal nil value can not be +// created from the given field kind. +func NewCanNotMakeNormalNilFromFieldKind(kind FieldKind) error { + return errors.New(errCanNotMakeNormalNilFromFieldKind, errors.NewKV("Kind", kind)) +} + // NewErrUnhandledType returns an error indicating that the given value is of // a type that is not handled. func NewErrUnhandledType(property string, actual any) error { diff --git a/client/normal_array.go b/client/normal_array.go new file mode 100644 index 0000000000..00133a0f74 --- /dev/null +++ b/client/normal_array.go @@ -0,0 +1,149 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package client + +import ( + "time" + + "golang.org/x/exp/constraints" +) + +type baseArrayNormalValue[T any] struct { + NormalVoid + val T +} + +func (v baseArrayNormalValue[T]) Unwrap() any { + return v.val +} + +func (v baseArrayNormalValue[T]) IsArray() bool { + return true +} + +func newBaseArrayNormalValue[T any](val T) baseArrayNormalValue[T] { + return baseArrayNormalValue[T]{val: val} +} + +type normalBoolArray struct { + baseArrayNormalValue[[]bool] +} + +func (v normalBoolArray) BoolArray() ([]bool, bool) { + return v.val, true +} + +type normalIntArray struct { + baseArrayNormalValue[[]int64] +} + +func (v normalIntArray) IntArray() ([]int64, bool) { + return v.val, true +} + +type normalFloatArray struct { + baseArrayNormalValue[[]float64] +} + +func (v normalFloatArray) FloatArray() ([]float64, bool) { + return v.val, true +} + +type normalStringArray struct { + baseArrayNormalValue[[]string] +} + +func (v normalStringArray) StringArray() ([]string, bool) { + return v.val, true +} + +type normalBytesArray struct { + baseArrayNormalValue[[][]byte] +} + +func (v normalBytesArray) BytesArray() ([][]byte, bool) { + return v.val, true +} + +type normalTimeArray struct { + baseArrayNormalValue[[]time.Time] +} + +func (v normalTimeArray) TimeArray() ([]time.Time, bool) { + return v.val, true +} + +type normalDocumentArray struct { + baseArrayNormalValue[[]*Document] +} + +func (v normalDocumentArray) DocumentArray() ([]*Document, bool) { + return v.val, true +} + +// NewNormalBoolArray creates a new NormalValue that represents a `[]bool` value. +func NewNormalBoolArray(val []bool) NormalValue { + return normalBoolArray{newBaseArrayNormalValue(val)} +} + +// NewNormalIntArray creates a new NormalValue that represents a `[]int64` value. +func NewNormalIntArray[T constraints.Integer | constraints.Float](val []T) NormalValue { + return normalIntArray{newBaseArrayNormalValue(normalizeNumArr[int64](val))} +} + +// NewNormalFloatArray creates a new NormalValue that represents a `[]float64` value. +func NewNormalFloatArray[T constraints.Integer | constraints.Float](val []T) NormalValue { + return normalFloatArray{newBaseArrayNormalValue(normalizeNumArr[float64](val))} +} + +// NewNormalStringArray creates a new NormalValue that represents a `[]string` value. +func NewNormalStringArray[T string | []byte](val []T) NormalValue { + return normalStringArray{newBaseArrayNormalValue(normalizeCharsArr[string](val))} +} + +// NewNormalBytesArray creates a new NormalValue that represents a `[][]byte` value. +func NewNormalBytesArray[T string | []byte](val []T) NormalValue { + return normalBytesArray{newBaseArrayNormalValue(normalizeCharsArr[[]byte](val))} +} + +// NewNormalTimeArray creates a new NormalValue that represents a `[]time.Time` value. +func NewNormalTimeArray(val []time.Time) NormalValue { + return normalTimeArray{newBaseArrayNormalValue(val)} +} + +// NewNormalDocumentArray creates a new NormalValue that represents a `[]*Document` value. 
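+// The typed slice can later be recovered through the matching accessor, for example +// (a sketch): if docs, ok := v.DocumentArray(); ok { /* use docs */ }.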
+func NewNormalDocumentArray(val []*Document) NormalValue { + return normalDocumentArray{newBaseArrayNormalValue(val)} +} + +func normalizeNumArr[R int64 | float64, T constraints.Integer | constraints.Float](val []T) []R { + var v any = val + if arr, ok := v.([]R); ok { + return arr + } + arr := make([]R, len(val)) + for i, v := range val { + arr[i] = R(v) + } + return arr +} + +func normalizeCharsArr[R string | []byte, T string | []byte](val []T) []R { + var v any = val + if arr, ok := v.([]R); ok { + return arr + } + arr := make([]R, len(val)) + for i, v := range val { + arr[i] = R(v) + } + return arr +} diff --git a/client/normal_array_of_nillables.go b/client/normal_array_of_nillables.go new file mode 100644 index 0000000000..53461f6afa --- /dev/null +++ b/client/normal_array_of_nillables.go @@ -0,0 +1,142 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" + "golang.org/x/exp/constraints" +) + +type normalNillableBoolArray struct { + baseArrayNormalValue[[]immutable.Option[bool]] +} + +func (v normalNillableBoolArray) NillableBoolArray() ([]immutable.Option[bool], bool) { + return v.val, true +} + +type normalNillableIntArray struct { + baseArrayNormalValue[[]immutable.Option[int64]] +} + +func (v normalNillableIntArray) NillableIntArray() ([]immutable.Option[int64], bool) { + return v.val, true +} + +type normalNillableFloatArray struct { + baseArrayNormalValue[[]immutable.Option[float64]] +} + +func (v normalNillableFloatArray) NillableFloatArray() ([]immutable.Option[float64], bool) { + return v.val, true +} + +type normalNillableStringArray struct { + baseArrayNormalValue[[]immutable.Option[string]] +} + +func (v normalNillableStringArray) NillableStringArray() ([]immutable.Option[string], bool) { + return v.val, true +} + +type normalNillableBytesArray struct { + baseArrayNormalValue[[]immutable.Option[[]byte]] +} + +func (v normalNillableBytesArray) NillableBytesArray() ([]immutable.Option[[]byte], bool) { + return v.val, true +} + +type normalNillableTimeArray struct { + baseArrayNormalValue[[]immutable.Option[time.Time]] +} + +func (v normalNillableTimeArray) NillableTimeArray() ([]immutable.Option[time.Time], bool) { + return v.val, true +} + +type normalNillableDocumentArray struct { + baseArrayNormalValue[[]immutable.Option[*Document]] +} + +func (v normalNillableDocumentArray) NillableDocumentArray() ([]immutable.Option[*Document], bool) { + return v.val, true +} + +// NewNormalNillableBoolArray creates a new NormalValue that represents a +// `[]immutable.Option[bool]` value. +func NewNormalNillableBoolArray(val []immutable.Option[bool]) NormalValue { + return normalNillableBoolArray{newBaseArrayNormalValue(val)} +} + +// NewNormalNillableIntArray creates a new NormalValue that represents a `[]immutable.Option[int64]` value.
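+// For example (a sketch), integer widths are normalized to int64: +// NewNormalNillableIntArray([]immutable.Option[int32]{immutable.Some(int32(1))}) +// is stored as []immutable.Option[int64]{immutable.Some(int64(1))}.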
+func NewNormalNillableIntArray[T constraints.Integer | constraints.Float](val []immutable.Option[T]) NormalValue { + return normalNillableIntArray{newBaseArrayNormalValue(normalizeNillableNumArr[int64](val))} +} + +// NewNormalNillableFloatArray creates a new NormalValue that represents a `[]immutable.Option[float64]` value. +func NewNormalNillableFloatArray[T constraints.Integer | constraints.Float]( + val []immutable.Option[T], +) NormalValue { + return normalNillableFloatArray{newBaseArrayNormalValue(normalizeNillableNumArr[float64](val))} +} + +// NewNormalNillableStringArray creates a new NormalValue that represents a `[]immutable.Option[string]` value. +func NewNormalNillableStringArray[T string | []byte](val []immutable.Option[T]) NormalValue { + return normalNillableStringArray{newBaseArrayNormalValue(normalizeNillableCharsArr[string](val))} +} + +// NewNormalNillableBytesArray creates a new NormalValue that represents a `[]immutable.Option[[]byte]` value. +func NewNormalNillableBytesArray[T string | []byte](val []immutable.Option[T]) NormalValue { + return normalNillableBytesArray{newBaseArrayNormalValue(normalizeNillableCharsArr[[]byte](val))} +} + +// NewNormalNillableTimeArray creates a new NormalValue that represents a `[]immutable.Option[time.Time]` value. +func NewNormalNillableTimeArray(val []immutable.Option[time.Time]) NormalValue { + return normalNillableTimeArray{newBaseArrayNormalValue(val)} +} + +// NewNormalNillableDocumentArray creates a new NormalValue that represents a `[]immutable.Option[*Document]` value. +func NewNormalNillableDocumentArray(val []immutable.Option[*Document]) NormalValue { + return normalNillableDocumentArray{newBaseArrayNormalValue(val)} +} + +func normalizeNillableNumArr[R int64 | float64, T constraints.Integer | constraints.Float]( + val []immutable.Option[T], +) []immutable.Option[R] { + var v any = val + if arr, ok := v.([]immutable.Option[R]); ok { + return arr + } + arr := make([]immutable.Option[R], len(val)) + for i, v := range val { + arr[i] = normalizeNillableNum[R](v) + } + return arr +} + +func normalizeNillableCharsArr[R string | []byte, T string | []byte](val []immutable.Option[T]) []immutable.Option[R] { + var v any = val + if arr, ok := v.([]immutable.Option[R]); ok { + return arr + } + arr := make([]immutable.Option[R], len(val)) + for i, v := range val { + if v.HasValue() { + arr[i] = immutable.Some(R(v.Value())) + } else { + arr[i] = immutable.None[R]() + } + } + return arr +} diff --git a/client/normal_new.go b/client/normal_new.go new file mode 100644 index 0000000000..55ac46ce73 --- /dev/null +++ b/client/normal_new.go @@ -0,0 +1,465 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" +) + +// NewNormalValue creates a new NormalValue from the given value. +// It will normalize all known types that can be converted to normal ones. +// For example, if the given type is `[]int32`, it will be converted to `[]int64`. +// If the given value is of type `[]any` it will go through every element and try to convert it +// to the most common type and normalize it.
+// For example, the following conversions will be made: +// - `[]any{int32(1), int64(2)}` -> `[]int64{1, 2}`. +// - `[]any{int32(1), int64(2), float32(1.5)}` -> `[]float64{1.0, 2.0, 1.5}`. +// - `[]any{int32(1), nil}` -> `[]immutable.Option[int64]{immutable.Some(1), immutable.None[int64]()}`. +// +// This function will not check if the given value is `nil`. To normalize a `nil` value use the +// `NewNormalNil` function. +func NewNormalValue(val any) (NormalValue, error) { + switch v := val.(type) { + case bool: + return NewNormalBool(v), nil + case int8: + return newNormalInt(int64(v)), nil + case int16: + return newNormalInt(int64(v)), nil + case int32: + return newNormalInt(int64(v)), nil + case int64: + return newNormalInt(v), nil + case int: + return newNormalInt(int64(v)), nil + case uint8: + return newNormalInt(int64(v)), nil + case uint16: + return newNormalInt(int64(v)), nil + case uint32: + return newNormalInt(int64(v)), nil + case uint64: + return newNormalInt(int64(v)), nil + case uint: + return newNormalInt(int64(v)), nil + case float32: + return newNormalFloat(float64(v)), nil + case float64: + return newNormalFloat(v), nil + case string: + return NewNormalString(v), nil + case []byte: + return NewNormalBytes(v), nil + case time.Time: + return NewNormalTime(v), nil + case *Document: + return NewNormalDocument(v), nil + + case immutable.Option[bool]: + return NewNormalNillableBool(v), nil + case immutable.Option[int8]: + return NewNormalNillableInt(v), nil + case immutable.Option[int16]: + return NewNormalNillableInt(v), nil + case immutable.Option[int32]: + return NewNormalNillableInt(v), nil + case immutable.Option[int64]: + return NewNormalNillableInt(v), nil + case immutable.Option[int]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint8]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint16]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint32]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint64]: + return NewNormalNillableInt(v), nil + case immutable.Option[uint]: + return NewNormalNillableInt(v), nil + case immutable.Option[float32]: + return NewNormalNillableFloat(v), nil + case immutable.Option[float64]: + return NewNormalNillableFloat(v), nil + case immutable.Option[string]: + return NewNormalNillableString(v), nil + case immutable.Option[[]byte]: + return NewNormalNillableBytes(v), nil + case immutable.Option[time.Time]: + return NewNormalNillableTime(v), nil + case immutable.Option[*Document]: + return NewNormalNillableDocument(v), nil + + case []bool: + return NewNormalBoolArray(v), nil + case []int8: + return NewNormalIntArray(v), nil + case []int16: + return NewNormalIntArray(v), nil + case []int32: + return NewNormalIntArray(v), nil + case []int64: + return NewNormalIntArray(v), nil + case []int: + return NewNormalIntArray(v), nil + case []uint16: + return NewNormalIntArray(v), nil + case []uint32: + return NewNormalIntArray(v), nil + case []uint64: + return NewNormalIntArray(v), nil + case []uint: + return NewNormalIntArray(v), nil + case []float32: + return NewNormalFloatArray(v), nil + case []float64: + return NewNormalFloatArray(v), nil + case []string: + return NewNormalStringArray(v), nil + case [][]byte: + return NewNormalBytesArray(v), nil + case []time.Time: + return NewNormalTimeArray(v), nil + case []*Document: + return NewNormalDocumentArray(v), nil + + case []immutable.Option[bool]: + return NewNormalNillableBoolArray(v), nil + case []immutable.Option[int8]: + return 
NewNormalNillableIntArray(v), nil + case []immutable.Option[int16]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int32]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int64]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[int]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint8]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint16]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint32]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint64]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[uint]: + return NewNormalNillableIntArray(v), nil + case []immutable.Option[float32]: + return NewNormalNillableFloatArray(v), nil + case []immutable.Option[float64]: + return NewNormalNillableFloatArray(v), nil + case []immutable.Option[string]: + return NewNormalNillableStringArray(v), nil + case []immutable.Option[[]byte]: + return NewNormalNillableBytesArray(v), nil + case []immutable.Option[time.Time]: + return NewNormalNillableTimeArray(v), nil + case []immutable.Option[*Document]: + return NewNormalNillableDocumentArray(v), nil + + case immutable.Option[[]bool]: + return NewNormalBoolNillableArray(v), nil + case immutable.Option[[]int8]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int16]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int32]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int64]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]int]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint16]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint32]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint64]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]uint]: + return NewNormalIntNillableArray(v), nil + case immutable.Option[[]float32]: + return NewNormalFloatNillableArray(v), nil + case immutable.Option[[]float64]: + return NewNormalFloatNillableArray(v), nil + case immutable.Option[[]string]: + return NewNormalStringNillableArray(v), nil + case immutable.Option[[][]byte]: + return NewNormalBytesNillableArray(v), nil + case immutable.Option[[]time.Time]: + return NewNormalTimeNillableArray(v), nil + case immutable.Option[[]*Document]: + return NewNormalDocumentNillableArray(v), nil + + case immutable.Option[[]immutable.Option[bool]]: + return NewNormalNillableBoolNillableArray(v), nil + case immutable.Option[[]immutable.Option[int8]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int16]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int32]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int64]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[int]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint8]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint16]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint32]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[uint64]]: + return NewNormalNillableIntNillableArray(v), nil + case 
immutable.Option[[]immutable.Option[uint]]: + return NewNormalNillableIntNillableArray(v), nil + case immutable.Option[[]immutable.Option[float32]]: + return NewNormalNillableFloatNillableArray(v), nil + case immutable.Option[[]immutable.Option[float64]]: + return NewNormalNillableFloatNillableArray(v), nil + case immutable.Option[[]immutable.Option[string]]: + return NewNormalNillableStringNillableArray(v), nil + case immutable.Option[[]immutable.Option[[]byte]]: + return NewNormalNillableBytesNillableArray(v), nil + case immutable.Option[[]immutable.Option[time.Time]]: + return NewNormalNillableTimeNillableArray(v), nil + case immutable.Option[[]immutable.Option[*Document]]: + return NewNormalNillableDocumentNillableArray(v), nil + + case []any: + if len(v) == 0 { + return nil, NewCanNotNormalizeValue(val) + } + first, err := NewNormalValue(v[0]) + if err != nil { + return nil, err + } + if _, ok := first.Bool(); ok { + return convertAnyArrToTypedArr[bool](v, NewNormalBoolArray, NewNormalNillableBoolArray) + } + if _, ok := first.Int(); ok { + return convertAnyArrToIntOrFloatArr(v) + } + if _, ok := first.Float(); ok { + return convertAnyArrToFloatArr(v) + } + if _, ok := first.String(); ok { + return convertAnyArrToTypedArr[string](v, NewNormalStringArray, NewNormalNillableStringArray) + } + if _, ok := first.Bytes(); ok { + return convertAnyArrToTypedArr[[]byte](v, NewNormalBytesArray, NewNormalNillableBytesArray) + } + if _, ok := first.Time(); ok { + return convertAnyArrToTypedArr[time.Time](v, NewNormalTimeArray, NewNormalNillableTimeArray) + } + if _, ok := first.Document(); ok { + return convertAnyArrToTypedArr[*Document](v, NewNormalDocumentArray, NewNormalNillableDocumentArray) + } + } + return nil, NewCanNotNormalizeValue(val) +} + +func convertAnyArrToIntOrFloatArr(arr []any) (NormalValue, error) { + result := make([]int64, len(arr)) + for i := range arr { + if arr[i] == nil { + return convertAnyArrToNillableIntOrFloatArr(arr) + } + switch v := arr[i].(type) { + case int64: + result[i] = v + case float64, float32: + return convertAnyArrToFloatArr(arr) + case int8: + result[i] = int64(v) + case int16: + result[i] = int64(v) + case int32: + result[i] = int64(v) + case int: + result[i] = int64(v) + case uint8: + result[i] = int64(v) + case uint16: + result[i] = int64(v) + case uint32: + result[i] = int64(v) + case uint64: + result[i] = int64(v) + case uint: + result[i] = int64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + } + return NewNormalIntArray(result), nil +} + +func convertAnyArrToNillableIntOrFloatArr(arr []any) (NormalValue, error) { + result := make([]immutable.Option[int64], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[int64]() + continue + } + var intVal int64 + switch v := arr[i].(type) { + case int64: + intVal = v + case float64, float32: + return convertAnyArrToFloatArr(arr) + case int8: + intVal = int64(v) + case int16: + intVal = int64(v) + case int32: + intVal = int64(v) + case int: + intVal = int64(v) + case uint8: + intVal = int64(v) + case uint16: + intVal = int64(v) + case uint32: + intVal = int64(v) + case uint64: + intVal = int64(v) + case uint: + intVal = int64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = immutable.Some(intVal) + } + return NewNormalNillableIntArray(result), nil +} + +func convertAnyArrToFloatArr(arr []any) (NormalValue, error) { + result := make([]float64, len(arr)) + for i := range arr { + if arr[i] == nil { + return 
convertAnyArrToNillableFloatArr(arr) + } + + var floatVal float64 + switch v := arr[i].(type) { + case float64: + floatVal = v + case float32: + floatVal = float64(v) + case int8: + floatVal = float64(v) + case int16: + floatVal = float64(v) + case int32: + floatVal = float64(v) + case int64: + floatVal = float64(v) + case int: + floatVal = float64(v) + case uint8: + floatVal = float64(v) + case uint16: + floatVal = float64(v) + case uint32: + floatVal = float64(v) + case uint64: + floatVal = float64(v) + case uint: + floatVal = float64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = floatVal + } + return NewNormalFloatArray(result), nil +} + +func convertAnyArrToNillableFloatArr(arr []any) (NormalValue, error) { + result := make([]immutable.Option[float64], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[float64]() + continue + } + var floatVal float64 + switch v := arr[i].(type) { + case float64: + floatVal = v + case float32: + floatVal = float64(v) + case int8: + floatVal = float64(v) + case int16: + floatVal = float64(v) + case int32: + floatVal = float64(v) + case int64: + floatVal = float64(v) + case int: + floatVal = float64(v) + case uint8: + floatVal = float64(v) + case uint16: + floatVal = float64(v) + case uint32: + floatVal = float64(v) + case uint64: + floatVal = float64(v) + case uint: + floatVal = float64(v) + default: + return nil, NewCanNotNormalizeValue(arr) + } + result[i] = immutable.Some(floatVal) + } + return NewNormalNillableFloatArray(result), nil +} + +func convertAnyArrToTypedArr[T any]( + arr []any, + newNormalArr func([]T) NormalValue, + newNormalNillableArr func([]immutable.Option[T]) NormalValue, +) (NormalValue, error) { + result := make([]T, len(arr)) + for i := range arr { + if arr[i] == nil { + return convertAnyArrToNillableTypedArr[T](arr, newNormalNillableArr) + } + if v, ok := arr[i].(T); ok { + result[i] = v + } else { + return nil, NewCanNotNormalizeValue(arr) + } + } + return newNormalArr(result), nil +} + +func convertAnyArrToNillableTypedArr[T any]( + arr []any, + newNormalNillableArr func([]immutable.Option[T]) NormalValue, +) (NormalValue, error) { + result := make([]immutable.Option[T], len(arr)) + for i := range arr { + if arr[i] == nil { + result[i] = immutable.None[T]() + continue + } + if v, ok := arr[i].(T); ok { + result[i] = immutable.Some(v) + } else { + return nil, NewCanNotNormalizeValue(arr) + } + } + return newNormalNillableArr(result), nil +} diff --git a/client/normal_nil.go b/client/normal_nil.go new file mode 100644 index 0000000000..7513fa9979 --- /dev/null +++ b/client/normal_nil.go @@ -0,0 +1,48 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" +) + +// NewNormalNil creates a new NormalValue that represents a nil value of a given field kind. 
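+// For example (a sketch): v, _ := NewNormalNil(FieldKind_NILLABLE_INT) +// returns a value for which IsNil() reports true and NillableInt() yields immutable.None[int64]().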
+func NewNormalNil(kind FieldKind) (NormalValue, error) { + if kind.IsObject() { + return NewNormalNillableDocument(immutable.None[*Document]()), nil + } + switch kind { + case FieldKind_NILLABLE_BOOL: + return NewNormalNillableBool(immutable.None[bool]()), nil + case FieldKind_NILLABLE_INT: + return NewNormalNillableInt(immutable.None[int64]()), nil + case FieldKind_NILLABLE_FLOAT: + return NewNormalNillableFloat(immutable.None[float64]()), nil + case FieldKind_NILLABLE_DATETIME: + return NewNormalNillableTime(immutable.None[time.Time]()), nil + case FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_JSON: + return NewNormalNillableString(immutable.None[string]()), nil + case FieldKind_NILLABLE_BLOB: + return NewNormalNillableBytes(immutable.None[[]byte]()), nil + case FieldKind_NILLABLE_BOOL_ARRAY: + return NewNormalBoolNillableArray(immutable.None[[]bool]()), nil + case FieldKind_NILLABLE_INT_ARRAY: + return NewNormalIntNillableArray(immutable.None[[]int64]()), nil + case FieldKind_NILLABLE_FLOAT_ARRAY: + return NewNormalFloatNillableArray(immutable.None[[]float64]()), nil + case FieldKind_NILLABLE_STRING_ARRAY: + return NewNormalStringNillableArray(immutable.None[[]string]()), nil + default: + return nil, NewCanNotMakeNormalNilFromFieldKind(kind) + } +} diff --git a/client/normal_nillable_array.go b/client/normal_nillable_array.go new file mode 100644 index 0000000000..fa6bdc4bbb --- /dev/null +++ b/client/normal_nillable_array.go @@ -0,0 +1,152 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" + "golang.org/x/exp/constraints" +) + +type baseNillableArrayNormalValue[T any] struct { + baseArrayNormalValue[immutable.Option[T]] +} + +func (v baseNillableArrayNormalValue[T]) Unwrap() any { + if v.val.HasValue() { + return v.val.Value() + } + return nil +} + +func (v baseNillableArrayNormalValue[T]) IsNil() bool { + return !v.val.HasValue() +} + +func (v baseNillableArrayNormalValue[T]) IsNillable() bool { + return true +} + +func (v baseNillableArrayNormalValue[T]) IsArray() bool { + return true +} + +func newBaseNillableArrayNormalValue[T any](val immutable.Option[T]) baseNillableArrayNormalValue[T] { + return baseNillableArrayNormalValue[T]{newBaseArrayNormalValue(val)} +} + +type normalBoolNillableArray struct { + baseNillableArrayNormalValue[[]bool] +} + +func (v normalBoolNillableArray) BoolNillableArray() (immutable.Option[[]bool], bool) { + return v.val, true +} + +type normalIntNillableArray struct { + baseNillableArrayNormalValue[[]int64] +} + +func (v normalIntNillableArray) IntNillableArray() (immutable.Option[[]int64], bool) { + return v.val, true +} + +type normalFloatNillableArray struct { + baseNillableArrayNormalValue[[]float64] +} + +func (v normalFloatNillableArray) FloatNillableArray() (immutable.Option[[]float64], bool) { + return v.val, true +} + +type normalStringNillableArray struct { + baseNillableArrayNormalValue[[]string] +} + +func (v normalStringNillableArray) StringNillableArray() (immutable.Option[[]string], bool) { + return v.val, true +} + +type normalBytesNillableArray struct { + baseNillableArrayNormalValue[[][]byte] +} + +func (v normalBytesNillableArray) BytesNillableArray() (immutable.Option[[][]byte], bool) { + return v.val, true +} + +type normalTimeNillableArray struct { + baseNillableArrayNormalValue[[]time.Time] +} + +func (v normalTimeNillableArray) TimeNillableArray() (immutable.Option[[]time.Time], bool) { + return v.val, true +} + +type normalDocumentNillableArray struct { + baseNillableArrayNormalValue[[]*Document] +} + +func (v normalDocumentNillableArray) DocumentNillableArray() (immutable.Option[[]*Document], bool) { + return v.val, true +} + +// NewNormalBoolNillableArray creates a new NormalValue that represents an `immutable.Option[[]bool]` value. +func NewNormalBoolNillableArray(val immutable.Option[[]bool]) NormalValue { + return normalBoolNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalIntNillableArray creates a new NormalValue that represents an `immutable.Option[[]int64]` value. +func NewNormalIntNillableArray[T constraints.Integer | constraints.Float](val immutable.Option[[]T]) NormalValue { + return normalIntNillableArray{newBaseNillableArrayNormalValue(normalizeNumNillableArr[int64](val))} +} + +// NewNormalFloatNillableArray creates a new NormalValue that represents an `immutable.Option[[]float64]` value. +func NewNormalFloatNillableArray[T constraints.Integer | constraints.Float](val immutable.Option[[]T]) NormalValue { + return normalFloatNillableArray{newBaseNillableArrayNormalValue(normalizeNumNillableArr[float64](val))} +} + +// NewNormalStringNillableArray creates a new NormalValue that represents an `immutable.Option[[]string]` value.
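+// As with the other char-like constructors, []byte input is also accepted, e.g. +// (a sketch): NewNormalStringNillableArray(immutable.Some([][]byte{[]byte("a")})).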
+func NewNormalStringNillableArray[T string | []byte](val immutable.Option[[]T]) NormalValue { + return normalStringNillableArray{newBaseNillableArrayNormalValue(normalizeCharsNillableArr[string](val))} +} + +// NewNormalBytesNillableArray creates a new NormalValue that represents an `immutable.Option[[][]byte]` value. +func NewNormalBytesNillableArray[T string | []byte](val immutable.Option[[]T]) NormalValue { + return normalBytesNillableArray{newBaseNillableArrayNormalValue(normalizeCharsNillableArr[[]byte](val))} +} + +// NewNormalTimeNillableArray creates a new NormalValue that represents an `immutable.Option[[]time.Time]` value. +func NewNormalTimeNillableArray(val immutable.Option[[]time.Time]) NormalValue { + return normalTimeNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalDocumentNillableArray creates a new NormalValue that represents an `immutable.Option[[]*Document]` value. +func NewNormalDocumentNillableArray(val immutable.Option[[]*Document]) NormalValue { + return normalDocumentNillableArray{newBaseNillableArrayNormalValue(val)} +} + +func normalizeNumNillableArr[R int64 | float64, T constraints.Integer | constraints.Float]( + val immutable.Option[[]T], +) immutable.Option[[]R] { + if val.HasValue() { + return immutable.Some(normalizeNumArr[R](val.Value())) + } + return immutable.None[[]R]() +} + +func normalizeCharsNillableArr[R string | []byte, T string | []byte](val immutable.Option[[]T]) immutable.Option[[]R] { + if val.HasValue() { + return immutable.Some(normalizeCharsArr[R](val.Value())) + } + return immutable.None[[]R]() +} diff --git a/client/normal_nillable_array_of_nillables.go b/client/normal_nillable_array_of_nillables.go new file mode 100644 index 0000000000..3594186ba2 --- /dev/null +++ b/client/normal_nillable_array_of_nillables.go @@ -0,0 +1,160 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
+ +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" + "golang.org/x/exp/constraints" +) + +type normalNillableBoolNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[bool]] +} + +func (v normalNillableBoolNillableArray) NillableBoolNillableArray() ( + immutable.Option[[]immutable.Option[bool]], bool, +) { + return v.val, true +} + +type normalNillableIntNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[int64]] +} + +func (v normalNillableIntNillableArray) NillableIntNillableArray() ( + immutable.Option[[]immutable.Option[int64]], bool, +) { + return v.val, true +} + +type normalNillableFloatNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[float64]] +} + +func (v normalNillableFloatNillableArray) NillableFloatNillableArray() ( + immutable.Option[[]immutable.Option[float64]], bool, +) { + return v.val, true +} + +type normalNillableStringNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[string]] +} + +func (v normalNillableStringNillableArray) NillableStringNillableArray() ( + immutable.Option[[]immutable.Option[string]], bool, +) { + return v.val, true +} + +type normalNillableBytesNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[[]byte]] +} + +func (v normalNillableBytesNillableArray) NillableBytesNillableArray() ( + immutable.Option[[]immutable.Option[[]byte]], bool, +) { + return v.val, true +} + +type normalNillableTimeNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[time.Time]] +} + +func (v normalNillableTimeNillableArray) NillableTimeNillableArray() ( + immutable.Option[[]immutable.Option[time.Time]], bool, +) { + return v.val, true +} + +type normalNillableDocumentNillableArray struct { + baseNillableArrayNormalValue[[]immutable.Option[*Document]] +} + +func (v normalNillableDocumentNillableArray) NillableDocumentNillableArray() ( + immutable.Option[[]immutable.Option[*Document]], bool, +) { + return v.val, true +} + +// NewNormalNillableBoolNillableArray creates a new NormalValue that represents an +// `immutable.Option[[]immutable.Option[bool]]` value. +func NewNormalNillableBoolNillableArray(val immutable.Option[[]immutable.Option[bool]]) NormalValue { + return normalNillableBoolNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalNillableIntNillableArray creates a new NormalValue that represents an +// `immutable.Option[[]immutable.Option[int64]]` value. +func NewNormalNillableIntNillableArray[T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) NormalValue { + return normalNillableIntNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableNumNillableArr[int64](val)), + } +} + +// NewNormalNillableFloatNillableArray creates a new NormalValue that represents an +// `immutable.Option[[]immutable.Option[float64]]` value. +func NewNormalNillableFloatNillableArray[T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) NormalValue { + return normalNillableFloatNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableNumNillableArr[float64](val)), + } +} + +// NewNormalNillableStringNillableArray creates a new NormalValue that represents an +// `immutable.Option[[]immutable.Option[string]]` value.
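+// For example (a sketch): +// NewNormalNillableStringNillableArray(immutable.Some([]immutable.Option[string]{immutable.Some("a"), immutable.None[string]()})).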
+func NewNormalNillableStringNillableArray[T string | []byte](val immutable.Option[[]immutable.Option[T]]) NormalValue { + return normalNillableStringNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableCharsNillableArr[string](val)), + } +} + +// NewNormalNillableBytesNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[[]byte]]` value. +func NewNormalNillableBytesNillableArray[T string | []byte](val immutable.Option[[]immutable.Option[T]]) NormalValue { + return normalNillableBytesNillableArray{ + newBaseNillableArrayNormalValue(normalizeNillableCharsNillableArr[[]byte](val)), + } +} + +// NewNormalNillableTimeNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[time.Time]]` value. +func NewNormalNillableTimeNillableArray(val immutable.Option[[]immutable.Option[time.Time]]) NormalValue { + return normalNillableTimeNillableArray{newBaseNillableArrayNormalValue(val)} +} + +// NewNormalNillableDocumentNillableArray creates a new NormalValue that represents a +// `immutable.Option[[]immutable.Option[*Document]]` value. +func NewNormalNillableDocumentNillableArray(val immutable.Option[[]immutable.Option[*Document]]) NormalValue { + return normalNillableDocumentNillableArray{newBaseNillableArrayNormalValue(val)} +} + +func normalizeNillableNumNillableArr[R int64 | float64, T constraints.Integer | constraints.Float]( + val immutable.Option[[]immutable.Option[T]], +) immutable.Option[[]immutable.Option[R]] { + if val.HasValue() { + return immutable.Some(normalizeNillableNumArr[R](val.Value())) + } + return immutable.None[[]immutable.Option[R]]() +} + +func normalizeNillableCharsNillableArr[R string | []byte, T string | []byte]( + val immutable.Option[[]immutable.Option[T]], +) immutable.Option[[]immutable.Option[R]] { + if val.HasValue() { + return immutable.Some(normalizeNillableCharsArr[R](val.Value())) + } + return immutable.None[[]immutable.Option[R]]() +} diff --git a/client/normal_nillable_scalar.go b/client/normal_nillable_scalar.go new file mode 100644 index 0000000000..88876c9d7e --- /dev/null +++ b/client/normal_nillable_scalar.go @@ -0,0 +1,148 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" + "golang.org/x/exp/constraints" +) + +type baseNillableNormalValue[T any] struct { + baseNormalValue[immutable.Option[T]] +} + +func (v baseNillableNormalValue[T]) Unwrap() any { + if v.val.HasValue() { + return v.val.Value() + } + return nil +} + +func (v baseNillableNormalValue[T]) IsNil() bool { + return !v.val.HasValue() +} + +func (v baseNillableNormalValue[T]) IsNillable() bool { + return true +} + +func newBaseNillableNormalValue[T any](val immutable.Option[T]) baseNillableNormalValue[T] { + return baseNillableNormalValue[T]{newBaseNormalValue(val)} +} + +type normalNillableBool struct { + baseNillableNormalValue[bool] +} + +func (v normalNillableBool) NillableBool() (immutable.Option[bool], bool) { + return v.val, true +} + +type normalNillableInt struct { + baseNillableNormalValue[int64] +} + +func (v normalNillableInt) NillableInt() (immutable.Option[int64], bool) { + return v.val, true +} + +type normalNillableFloat struct { + baseNillableNormalValue[float64] +} + +func (v normalNillableFloat) NillableFloat() (immutable.Option[float64], bool) { + return v.val, true +} + +type normalNillableString struct { + baseNillableNormalValue[string] +} + +func (v normalNillableString) NillableString() (immutable.Option[string], bool) { + return v.val, true +} + +type normalNillableBytes struct { + baseNillableNormalValue[[]byte] +} + +func (v normalNillableBytes) NillableBytes() (immutable.Option[[]byte], bool) { + return v.val, true +} + +type normalNillableTime struct { + baseNillableNormalValue[time.Time] +} + +func (v normalNillableTime) NillableTime() (immutable.Option[time.Time], bool) { + return v.val, true +} + +type normalNillableDocument struct { + baseNillableNormalValue[*Document] +} + +func (v normalNillableDocument) NillableDocument() (immutable.Option[*Document], bool) { + return v.val, true +} + +// NewNormalNillableBool creates a new NormalValue that represents a `immutable.Option[bool]` value. +func NewNormalNillableBool(val immutable.Option[bool]) NormalValue { + return normalNillableBool{newBaseNillableNormalValue(val)} +} + +// NewNormalNillableInt creates a new NormalValue that represents a `immutable.Option[int64]` value. +func NewNormalNillableInt[T constraints.Integer | constraints.Float](val immutable.Option[T]) NormalValue { + return normalNillableInt{newBaseNillableNormalValue(normalizeNillableNum[int64](val))} +} + +// NewNormalNillableFloat creates a new NormalValue that represents a `immutable.Option[float64]` value. +func NewNormalNillableFloat[T constraints.Integer | constraints.Float](val immutable.Option[T]) NormalValue { + return normalNillableFloat{newBaseNillableNormalValue(normalizeNillableNum[float64](val))} +} + +// NewNormalNillableString creates a new NormalValue that represents a `immutable.Option[string]` value. +func NewNormalNillableString[T string | []byte](val immutable.Option[T]) NormalValue { + return normalNillableString{newBaseNillableNormalValue(normalizeNillableChars[string](val))} +} + +// NewNormalNillableBytes creates a new NormalValue that represents a `immutable.Option[[]byte]` value. +func NewNormalNillableBytes[T string | []byte](val immutable.Option[T]) NormalValue { + return normalNillableBytes{newBaseNillableNormalValue(normalizeNillableChars[[]byte](val))} +} + +// NewNormalNillableTime creates a new NormalValue that represents a `immutable.Option[time.Time]` value. 
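+//
+// A minimal sketch of the nillable semantics (added for illustration, not part
+// of the original patch):
+//
+//	v := NewNormalNillableTime(immutable.None[time.Time]())
+//	v.IsNillable() // true
+//	v.IsNil()      // true, because the option is empty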
+func NewNormalNillableTime(val immutable.Option[time.Time]) NormalValue {
+	return normalNillableTime{newBaseNillableNormalValue(val)}
+}
+
+// NewNormalNillableDocument creates a new NormalValue that represents a `immutable.Option[*Document]` value.
+func NewNormalNillableDocument(val immutable.Option[*Document]) NormalValue {
+	return normalNillableDocument{newBaseNillableNormalValue(val)}
+}
+
+func normalizeNillableNum[R int64 | float64, T constraints.Integer | constraints.Float](
+	val immutable.Option[T],
+) immutable.Option[R] {
+	if val.HasValue() {
+		return immutable.Some(R(val.Value()))
+	}
+	return immutable.None[R]()
+}
+
+func normalizeNillableChars[R string | []byte, T string | []byte](val immutable.Option[T]) immutable.Option[R] {
+	if val.HasValue() {
+		return immutable.Some(R(val.Value()))
+	}
+	return immutable.None[R]()
+}
diff --git a/client/normal_scalar.go b/client/normal_scalar.go
new file mode 100644
index 0000000000..f4378f5474
--- /dev/null
+++ b/client/normal_scalar.go
@@ -0,0 +1,130 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"golang.org/x/exp/constraints"
+)
+
+// baseNormalValue is a dummy implementation of NormalValue to be embedded in other types.
+type baseNormalValue[T any] struct {
+	NormalVoid
+	val T
+}
+
+func (v baseNormalValue[T]) Unwrap() any {
+	return v.val
+}
+
+func newBaseNormalValue[T any](val T) baseNormalValue[T] {
+	return baseNormalValue[T]{val: val}
+}
+
+type normalBool struct {
+	baseNormalValue[bool]
+}
+
+func (v normalBool) Bool() (bool, bool) {
+	return v.val, true
+}
+
+type normalInt struct {
+	baseNormalValue[int64]
+}
+
+func (v normalInt) Int() (int64, bool) {
+	return v.val, true
+}
+
+type normalFloat struct {
+	baseNormalValue[float64]
+}
+
+func (v normalFloat) Float() (float64, bool) {
+	return v.val, true
+}
+
+type normalString struct {
+	baseNormalValue[string]
+}
+
+func (v normalString) String() (string, bool) {
+	return v.val, true
+}
+
+type normalBytes struct {
+	baseNormalValue[[]byte]
+}
+
+func (v normalBytes) Bytes() ([]byte, bool) {
+	return v.val, true
+}
+
+type normalTime struct {
+	baseNormalValue[time.Time]
+}
+
+func (v normalTime) Time() (time.Time, bool) {
+	return v.val, true
+}
+
+type normalDocument struct {
+	baseNormalValue[*Document]
+}
+
+func (v normalDocument) Document() (*Document, bool) {
+	return v.val, true
+}
+
+func newNormalInt(val int64) NormalValue {
+	return normalInt{newBaseNormalValue(val)}
+}
+
+func newNormalFloat(val float64) NormalValue {
+	return normalFloat{newBaseNormalValue(val)}
+}
+
+// NewNormalBool creates a new NormalValue that represents a `bool` value.
+func NewNormalBool(val bool) NormalValue {
+	return normalBool{baseNormalValue[bool]{val: val}}
+}
+
+// NewNormalInt creates a new NormalValue that represents an `int64` value.
+func NewNormalInt[T constraints.Integer | constraints.Float](val T) NormalValue {
+	return normalInt{baseNormalValue[int64]{val: int64(val)}}
+}
+
+// NewNormalFloat creates a new NormalValue that represents a `float64` value.
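+//
+// Any integer or float input is widened to float64. A hypothetical usage
+// sketch (added for illustration, not part of the original patch):
+//
+//	v := NewNormalFloat(int32(2))
+//	f, ok := v.Float() // f == 2.0, ok == true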
+func NewNormalFloat[T constraints.Integer | constraints.Float](val T) NormalValue { + return normalFloat{baseNormalValue[float64]{val: float64(val)}} +} + +// NewNormalString creates a new NormalValue that represents a `string` value. +func NewNormalString[T string | []byte](val T) NormalValue { + return normalString{baseNormalValue[string]{val: string(val)}} +} + +// NewNormalBytes creates a new NormalValue that represents a `[]byte` value. +func NewNormalBytes[T string | []byte](val T) NormalValue { + return normalBytes{baseNormalValue[[]byte]{val: []byte(val)}} +} + +// NewNormalTime creates a new NormalValue that represents a `time.Time` value. +func NewNormalTime(val time.Time) NormalValue { + return normalTime{baseNormalValue[time.Time]{val: val}} +} + +// NewNormalDocument creates a new NormalValue that represents a `*Document` value. +func NewNormalDocument(val *Document) NormalValue { + return normalDocument{baseNormalValue[*Document]{val: val}} +} diff --git a/client/normal_util.go b/client/normal_util.go new file mode 100644 index 0000000000..87310d9631 --- /dev/null +++ b/client/normal_util.go @@ -0,0 +1,118 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +// ToArrayOfNormalValues converts a NormalValue into a slice of NormalValue if the given value +// is an array. If the given value is not an array, an error is returned. +func ToArrayOfNormalValues(val NormalValue) ([]NormalValue, error) { + if !val.IsArray() { + return nil, NewCanNotTurnNormalValueIntoArray(val) + } + if !val.IsNillable() { + if v, ok := val.BoolArray(); ok { + return toNormalArray(v, NewNormalBool), nil + } + if v, ok := val.IntArray(); ok { + return toNormalArray(v, NewNormalInt), nil + } + if v, ok := val.FloatArray(); ok { + return toNormalArray(v, NewNormalFloat), nil + } + if v, ok := val.StringArray(); ok { + return toNormalArray(v, NewNormalString), nil + } + if v, ok := val.BytesArray(); ok { + return toNormalArray(v, NewNormalBytes), nil + } + if v, ok := val.TimeArray(); ok { + return toNormalArray(v, NewNormalTime), nil + } + if v, ok := val.DocumentArray(); ok { + return toNormalArray(v, NewNormalDocument), nil + } + if v, ok := val.NillableBoolArray(); ok { + return toNormalArray(v, NewNormalNillableBool), nil + } + if v, ok := val.NillableIntArray(); ok { + return toNormalArray(v, NewNormalNillableInt), nil + } + if v, ok := val.NillableFloatArray(); ok { + return toNormalArray(v, NewNormalNillableFloat), nil + } + if v, ok := val.NillableStringArray(); ok { + return toNormalArray(v, NewNormalNillableString), nil + } + if v, ok := val.NillableBytesArray(); ok { + return toNormalArray(v, NewNormalNillableBytes), nil + } + if v, ok := val.NillableTimeArray(); ok { + return toNormalArray(v, NewNormalNillableTime), nil + } + if v, ok := val.NillableDocumentArray(); ok { + return toNormalArray(v, NewNormalNillableDocument), nil + } + } else { + if val.IsNil() { + return nil, nil + } + if v, ok := val.NillableBoolNillableArray(); ok { + return toNormalArray(v.Value(), NewNormalNillableBool), nil + } + if v, ok := val.NillableIntNillableArray(); ok { + return toNormalArray(v.Value(), NewNormalNillableInt), nil + } + if v, ok := 
val.NillableFloatNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableFloat), nil
+		}
+		if v, ok := val.NillableStringNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableString), nil
+		}
+		if v, ok := val.NillableBytesNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableBytes), nil
+		}
+		if v, ok := val.NillableTimeNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableTime), nil
+		}
+		if v, ok := val.NillableDocumentNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalNillableDocument), nil
+		}
+		if v, ok := val.BoolNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalBool), nil
+		}
+		if v, ok := val.IntNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalInt), nil
+		}
+		if v, ok := val.FloatNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalFloat), nil
+		}
+		if v, ok := val.StringNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalString), nil
+		}
+		if v, ok := val.BytesNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalBytes), nil
+		}
+		if v, ok := val.TimeNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalTime), nil
+		}
+		if v, ok := val.DocumentNillableArray(); ok {
+			return toNormalArray(v.Value(), NewNormalDocument), nil
+		}
+	}
+	return nil, NewCanNotTurnNormalValueIntoArray(val)
+}
+
+func toNormalArray[T any](val []T, f func(T) NormalValue) []NormalValue {
+	res := make([]NormalValue, len(val))
+	for i := range val {
+		res[i] = f(val[i])
+	}
+	return res
+}
diff --git a/client/normal_value.go b/client/normal_value.go
new file mode 100644
index 0000000000..3f0681fbfc
--- /dev/null
+++ b/client/normal_value.go
@@ -0,0 +1,207 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+import (
+	"time"
+
+	"github.com/sourcenetwork/immutable"
+)
+
+// NormalValue is the interface for the normal value types.
+// It is used to represent the normal (or standard) values across the system and to avoid
+// asserting all possible types like int, int32, int64, etc.
+//
+// All methods returning a specific type return the value and a second boolean flag indicating
+// whether the value is of the requested type. They act similarly to Go's type assertions.
+//
+// All nillable values are represented as [immutable.Option[T]].
+type NormalValue interface {
+	// Unwrap returns the underlying value.
+	// For non-nillable values it will return the value as is.
+	// For nillable values (of type [immutable.Option[T]]) it will return the value itself
+	// if the option has a value, otherwise it will return nil.
+	Unwrap() any
+
+	// IsNil reports whether the value is nil. For non-nillable values it always returns false.
+	IsNil() bool
+	// IsNillable reports whether the value can be nil.
+	IsNillable() bool
+	// IsArray reports whether the value is an array.
+	IsArray() bool
+
+	// Bool returns the value as a bool. The second return flag is true if the value is a bool.
+	// Otherwise it will return false and false.
+	Bool() (bool, bool)
+	// Int returns the value as an int64. The second return flag is true if the value is an int64.
+	// Otherwise it will return 0 and false.
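+	// A hypothetical illustration of the comma-ok style shared by all of the
+	// accessors below (an added sketch, not part of the original patch):
+	//
+	//	if i, ok := v.Int(); ok {
+	//		total += i
+	//	}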
+	Int() (int64, bool)
+	// Float returns the value as a float64. The second return flag is true if the value is a float64.
+	// Otherwise it will return 0 and false.
+	Float() (float64, bool)
+	// String returns the value as a string. The second return flag is true if the value is a string.
+	// Otherwise it will return "" and false.
+	String() (string, bool)
+	// Bytes returns the value as a []byte. The second return flag is true if the value is a []byte.
+	// Otherwise it will return nil and false.
+	Bytes() ([]byte, bool)
+	// Time returns the value as a [time.Time]. The second return flag is true if the value is a [time.Time].
+	// Otherwise it will return a zero [time.Time] and false.
+	Time() (time.Time, bool)
+	// Document returns the value as a [*Document]. The second return flag is true if the value is a [*Document].
+	// Otherwise it will return nil and false.
+	Document() (*Document, bool)
+
+	// NillableBool returns the value as a nillable bool.
+	// The second return flag is true if the value is [immutable.Option[bool]].
+	// Otherwise it will return [immutable.None[bool]()] and false.
+	NillableBool() (immutable.Option[bool], bool)
+	// NillableInt returns the value as a nillable int64.
+	// The second return flag is true if the value is [immutable.Option[int64]].
+	// Otherwise it will return [immutable.None[int64]()] and false.
+	NillableInt() (immutable.Option[int64], bool)
+	// NillableFloat returns the value as a nillable float64.
+	// The second return flag is true if the value is [immutable.Option[float64]].
+	// Otherwise it will return [immutable.None[float64]()] and false.
+	NillableFloat() (immutable.Option[float64], bool)
+	// NillableString returns the value as a nillable string.
+	// The second return flag is true if the value is [immutable.Option[string]].
+	// Otherwise it will return [immutable.None[string]()] and false.
+	NillableString() (immutable.Option[string], bool)
+	// NillableBytes returns the value as a nillable byte slice.
+	// The second return flag is true if the value is [immutable.Option[[]byte]].
+	// Otherwise it will return [immutable.None[[]byte]()] and false.
+	NillableBytes() (immutable.Option[[]byte], bool)
+	// NillableTime returns the value as a nillable time.Time.
+	// The second return flag is true if the value is [immutable.Option[time.Time]].
+	// Otherwise it will return [immutable.None[time.Time]()] and false.
+	NillableTime() (immutable.Option[time.Time], bool)
+	// NillableDocument returns the value as a nillable *Document.
+	// The second return flag is true if the value is [immutable.Option[*Document]].
+	// Otherwise it will return [immutable.None[*Document]()] and false.
+	NillableDocument() (immutable.Option[*Document], bool)
+
+	// BoolArray returns the value as a bool array.
+	// The second return flag is true if the value is a []bool.
+	// Otherwise it will return nil and false.
+	BoolArray() ([]bool, bool)
+	// IntArray returns the value as an int64 array.
+	// The second return flag is true if the value is a []int64.
+	// Otherwise it will return nil and false.
+	IntArray() ([]int64, bool)
+	// FloatArray returns the value as a float64 array.
+	// The second return flag is true if the value is a []float64.
+	// Otherwise it will return nil and false.
+	FloatArray() ([]float64, bool)
+	// StringArray returns the value as a string array.
+	// The second return flag is true if the value is a []string.
+	// Otherwise it will return nil and false.
+	StringArray() ([]string, bool)
+	// BytesArray returns the value as a byte slice array.
+	// The second return flag is true if the value is a [][]byte.
+	// Otherwise it will return nil and false.
+	BytesArray() ([][]byte, bool)
+	// TimeArray returns the value as a time.Time array.
+	// The second return flag is true if the value is a [[]time.Time].
+	// Otherwise it will return nil and false.
+	TimeArray() ([]time.Time, bool)
+	// DocumentArray returns the value as a [*Document] array.
+	// The second return flag is true if the value is a [[]*Document].
+	// Otherwise it will return nil and false.
+	DocumentArray() ([]*Document, bool)
+
+	// BoolNillableArray returns the value as nillable array of bool elements.
+	// The second return flag is true if the value is [immutable.Option[[]bool]].
+	// Otherwise it will return [immutable.None[[]bool]()] and false.
+	BoolNillableArray() (immutable.Option[[]bool], bool)
+	// IntNillableArray returns the value as nillable array of int64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]int64]].
+	// Otherwise it will return [immutable.None[[]int64]()] and false.
+	IntNillableArray() (immutable.Option[[]int64], bool)
+	// FloatNillableArray returns the value as nillable array of float64 elements.
+	// The second return flag is true if the value is [immutable.Option[[]float64]].
+	// Otherwise it will return [immutable.None[[]float64]()] and false.
+	FloatNillableArray() (immutable.Option[[]float64], bool)
+	// StringNillableArray returns the value as nillable array of string elements.
+	// The second return flag is true if the value is [immutable.Option[[]string]].
+	// Otherwise it will return [immutable.None[[]string]()] and false.
+	StringNillableArray() (immutable.Option[[]string], bool)
+	// BytesNillableArray returns the value as nillable array of byte slice elements.
+	// The second return flag is true if the value is [immutable.Option[[][]byte]].
+	// Otherwise it will return [immutable.None[[][]byte]()] and false.
+	BytesNillableArray() (immutable.Option[[][]byte], bool)
+	// TimeNillableArray returns the value as nillable array of [time.Time] elements.
+	// The second return flag is true if the value is [immutable.Option[[]time.Time]].
+	// Otherwise it will return [immutable.None[[]time.Time]()] and false.
+	TimeNillableArray() (immutable.Option[[]time.Time], bool)
+	// DocumentNillableArray returns the value as nillable array of [*Document] elements.
+	// The second return flag is true if the value is [immutable.Option[[]*Document]].
+	// Otherwise it will return [immutable.None[[]*Document]()] and false.
+	DocumentNillableArray() (immutable.Option[[]*Document], bool)
+
+	// NillableBoolArray returns the value as array of nillable bool elements.
+	// The second return flag is true if the value is []immutable.Option[bool].
+	// Otherwise it will return nil and false.
+	NillableBoolArray() ([]immutable.Option[bool], bool)
+	// NillableIntArray returns the value as array of nillable int64 elements.
+	// The second return flag is true if the value is []immutable.Option[int64].
+	// Otherwise it will return nil and false.
+	NillableIntArray() ([]immutable.Option[int64], bool)
+	// NillableFloatArray returns the value as array of nillable float64 elements.
+	// The second return flag is true if the value is []immutable.Option[float64].
+	// Otherwise it will return nil and false.
+	NillableFloatArray() ([]immutable.Option[float64], bool)
+	// NillableStringArray returns the value as array of nillable string elements.
+	// The second return flag is true if the value is []immutable.Option[string].
+ // Otherwise it will return nil and false. + NillableStringArray() ([]immutable.Option[string], bool) + // NillableBytesArray returns the value as array of nillable byte slice elements. + // The second return flag is true if the value is []immutable.Option[[]byte]. + // Otherwise it will return nil and false. + NillableBytesArray() ([]immutable.Option[[]byte], bool) + // NillableTimeArray returns the value as array of nillable time.Time elements. + // The second return flag is true if the value is []immutable.Option[time.Time]. + // Otherwise it will return nil and false. + NillableTimeArray() ([]immutable.Option[time.Time], bool) + // NillableDocumentArray returns the value as array of nillable *Document elements. + // The second return flag is true if the value is []immutable.Option[*Document]. + // Otherwise it will return nil and false. + NillableDocumentArray() ([]immutable.Option[*Document], bool) + + // NillableBoolNillableArray returns the value as nillable array of nillable bool elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[bool]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[bool]]()] and false. + NillableBoolNillableArray() (immutable.Option[[]immutable.Option[bool]], bool) + // NillableIntNillableArray returns the value as nillable array of nillable int64 elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[int64]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[int64]]()] and false. + NillableIntNillableArray() (immutable.Option[[]immutable.Option[int64]], bool) + // NillableFloatNillableArray returns the value as nillable array of nillable float64 elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[float64]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[float64]]()] and false. + NillableFloatNillableArray() (immutable.Option[[]immutable.Option[float64]], bool) + // NillableStringNillableArray returns the value as nillable array of nillable string elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[string]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[string]]()] and false. + NillableStringNillableArray() (immutable.Option[[]immutable.Option[string]], bool) + // NillableBytesNillableArray returns the value as nillable array of nillable byte slice elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[[]byte]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[[]byte]]()] and false. + NillableBytesNillableArray() (immutable.Option[[]immutable.Option[[]byte]], bool) + // NillableTimeNillableArray returns the value as nillable array of nillable time.Time elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[time.Time]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[time.Time]]()] and false. + NillableTimeNillableArray() (immutable.Option[[]immutable.Option[time.Time]], bool) + // NillableDocumentNillableArray returns the value as nillable array of nillable *Document elements. + // The second return flag is true if the value is [immutable.Option[[]immutable.Option[*Document]]]. + // Otherwise it will return [immutable.None[[]immutable.Option[*Document]]()] and false. 
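+	// For the doubly-nillable accessors both the array and each element may be
+	// absent. A hedged sketch (added for illustration, not from the original patch):
+	//
+	//	if opt, ok := v.NillableDocumentNillableArray(); ok && opt.HasValue() {
+	//		for _, d := range opt.Value() {
+	//			_ = d.HasValue() // each element is itself an Option
+	//		}
+	//	}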
+ NillableDocumentNillableArray() (immutable.Option[[]immutable.Option[*Document]], bool) +} diff --git a/client/normal_value_test.go b/client/normal_value_test.go new file mode 100644 index 0000000000..75e858b056 --- /dev/null +++ b/client/normal_value_test.go @@ -0,0 +1,1624 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "reflect" + "testing" + "time" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type nType string + +const ( + BoolType nType = "Bool" + IntType nType = "Int" + FloatType nType = "Float" + StringType nType = "String" + BytesType nType = "Bytes" + TimeType nType = "Time" + DocumentType nType = "Document" + + NillableBoolType nType = "NillableBool" + NillableIntType nType = "NillableInt" + NillableFloatType nType = "NillableFloat" + NillableStringType nType = "NillableString" + NillableBytesType nType = "NillableBytes" + NillableTimeType nType = "NillableTime" + NillableDocumentType nType = "NillableDocument" + + BoolArray nType = "BoolArray" + IntArray nType = "IntArray" + FloatArray nType = "FloatArray" + StringArray nType = "StringArray" + BytesArray nType = "BytesArray" + TimeArray nType = "TimeArray" + DocumentArray nType = "DocumentArray" + + NillableBoolArray nType = "NillableBoolArray" + NillableIntArray nType = "NillableIntArray" + NillableFloatArray nType = "NillableFloatArray" + NillableStringArray nType = "NillableStringArray" + NillableBytesArray nType = "NillableBytesArray" + NillableTimeArray nType = "NillableTimeArray" + NillableDocumentArray nType = "NillableDocumentArray" + + BoolNillableArray nType = "BoolNillableArray" + IntNillableArray nType = "IntNillableArray" + FloatNillableArray nType = "FloatNillableArray" + StringNillableArray nType = "StringNillableArray" + BytesNillableArray nType = "BytesNillableArray" + TimeNillableArray nType = "TimeNillableArray" + DocumentNillableArray nType = "DocumentNillableArray" + + NillableBoolNillableArray nType = "NillableBoolNillableArray" + NillableIntNillableArray nType = "NillableIntNillableArray" + NillableFloatNillableArray nType = "NillableFloatNillableArray" + NillableStringNillableArray nType = "NillableStringNillableArray" + NillableBytesNillableArray nType = "NillableBytesNillableArray" + NillableTimeNillableArray nType = "NillableTimeNillableArray" + NillableDocumentNillableArray nType = "NillableDocumentNillableArray" +) + +// extractValue takes an input of type `any` and checks if it is an `Option[T]`. +// If it is and contains a value, it returns the contained value. +// Otherwise, it returns the input itself. +func extractValue(input any) any { + inputVal := reflect.ValueOf(input) + + // Check if the type is Option[T] by seeing if it has the HasValue and Value methods. + hasValueMethod := inputVal.MethodByName("HasValue") + valueMethod := inputVal.MethodByName("Value") + + if hasValueMethod.IsValid() && valueMethod.IsValid() { + // Call HasValue to check if there's a value. + hasValueResult := hasValueMethod.Call(nil) + if len(hasValueResult) == 1 { + if hasValueResult[0].Bool() { + // Call Value to get the actual value if HasValue is true. 
+ valueResult := valueMethod.Call(nil) + if len(valueResult) == 1 { + return valueResult[0].Interface() + } + } else { + // Return nil if HasValue is false. + return nil + } + } + } + + // Return the input itself if it's not an Option[T] with a value. + return input +} + +func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) { + typeAssertMap := map[nType]func(NormalValue) (any, bool){ + BoolType: func(v NormalValue) (any, bool) { return v.Bool() }, + IntType: func(v NormalValue) (any, bool) { return v.Int() }, + FloatType: func(v NormalValue) (any, bool) { return v.Float() }, + StringType: func(v NormalValue) (any, bool) { return v.String() }, + BytesType: func(v NormalValue) (any, bool) { return v.Bytes() }, + TimeType: func(v NormalValue) (any, bool) { return v.Time() }, + DocumentType: func(v NormalValue) (any, bool) { return v.Document() }, + + NillableBoolType: func(v NormalValue) (any, bool) { return v.NillableBool() }, + NillableIntType: func(v NormalValue) (any, bool) { return v.NillableInt() }, + NillableFloatType: func(v NormalValue) (any, bool) { return v.NillableFloat() }, + NillableStringType: func(v NormalValue) (any, bool) { return v.NillableString() }, + NillableBytesType: func(v NormalValue) (any, bool) { return v.NillableBytes() }, + NillableTimeType: func(v NormalValue) (any, bool) { return v.NillableTime() }, + NillableDocumentType: func(v NormalValue) (any, bool) { return v.NillableDocument() }, + + BoolArray: func(v NormalValue) (any, bool) { return v.BoolArray() }, + IntArray: func(v NormalValue) (any, bool) { return v.IntArray() }, + FloatArray: func(v NormalValue) (any, bool) { return v.FloatArray() }, + StringArray: func(v NormalValue) (any, bool) { return v.StringArray() }, + BytesArray: func(v NormalValue) (any, bool) { return v.BytesArray() }, + TimeArray: func(v NormalValue) (any, bool) { return v.TimeArray() }, + DocumentArray: func(v NormalValue) (any, bool) { return v.DocumentArray() }, + + BoolNillableArray: func(v NormalValue) (any, bool) { return v.BoolNillableArray() }, + IntNillableArray: func(v NormalValue) (any, bool) { return v.IntNillableArray() }, + FloatNillableArray: func(v NormalValue) (any, bool) { return v.FloatNillableArray() }, + StringNillableArray: func(v NormalValue) (any, bool) { return v.StringNillableArray() }, + BytesNillableArray: func(v NormalValue) (any, bool) { return v.BytesNillableArray() }, + TimeNillableArray: func(v NormalValue) (any, bool) { return v.TimeNillableArray() }, + DocumentNillableArray: func(v NormalValue) (any, bool) { return v.DocumentNillableArray() }, + + NillableBoolArray: func(v NormalValue) (any, bool) { return v.NillableBoolArray() }, + NillableIntArray: func(v NormalValue) (any, bool) { return v.NillableIntArray() }, + NillableFloatArray: func(v NormalValue) (any, bool) { return v.NillableFloatArray() }, + NillableStringArray: func(v NormalValue) (any, bool) { return v.NillableStringArray() }, + NillableBytesArray: func(v NormalValue) (any, bool) { return v.NillableBytesArray() }, + NillableTimeArray: func(v NormalValue) (any, bool) { return v.NillableTimeArray() }, + NillableDocumentArray: func(v NormalValue) (any, bool) { return v.NillableDocumentArray() }, + + NillableBoolNillableArray: func(v NormalValue) (any, bool) { return v.NillableBoolNillableArray() }, + NillableIntNillableArray: func(v NormalValue) (any, bool) { return v.NillableIntNillableArray() }, + NillableFloatNillableArray: func(v NormalValue) (any, bool) { return v.NillableFloatNillableArray() }, + NillableStringNillableArray: 
func(v NormalValue) (any, bool) { return v.NillableStringNillableArray() }, + NillableBytesNillableArray: func(v NormalValue) (any, bool) { return v.NillableBytesNillableArray() }, + NillableTimeNillableArray: func(v NormalValue) (any, bool) { return v.NillableTimeNillableArray() }, + NillableDocumentNillableArray: func(v NormalValue) (any, bool) { + return v.NillableDocumentNillableArray() + }, + } + + newMap := map[nType]func(any) NormalValue{ + BoolType: func(v any) NormalValue { return NewNormalBool(v.(bool)) }, + IntType: func(v any) NormalValue { return NewNormalInt(v.(int64)) }, + FloatType: func(v any) NormalValue { return NewNormalFloat(v.(float64)) }, + StringType: func(v any) NormalValue { return NewNormalString(v.(string)) }, + BytesType: func(v any) NormalValue { return NewNormalBytes(v.([]byte)) }, + TimeType: func(v any) NormalValue { return NewNormalTime(v.(time.Time)) }, + DocumentType: func(v any) NormalValue { return NewNormalDocument(v.(*Document)) }, + + NillableBoolType: func(v any) NormalValue { return NewNormalNillableBool(v.(immutable.Option[bool])) }, + NillableIntType: func(v any) NormalValue { return NewNormalNillableInt(v.(immutable.Option[int64])) }, + NillableFloatType: func(v any) NormalValue { return NewNormalNillableFloat(v.(immutable.Option[float64])) }, + NillableStringType: func(v any) NormalValue { return NewNormalNillableString(v.(immutable.Option[string])) }, + NillableBytesType: func(v any) NormalValue { return NewNormalNillableBytes(v.(immutable.Option[[]byte])) }, + NillableTimeType: func(v any) NormalValue { return NewNormalNillableTime(v.(immutable.Option[time.Time])) }, + NillableDocumentType: func(v any) NormalValue { return NewNormalNillableDocument(v.(immutable.Option[*Document])) }, + + BoolArray: func(v any) NormalValue { return NewNormalBoolArray(v.([]bool)) }, + IntArray: func(v any) NormalValue { return NewNormalIntArray(v.([]int64)) }, + FloatArray: func(v any) NormalValue { return NewNormalFloatArray(v.([]float64)) }, + StringArray: func(v any) NormalValue { return NewNormalStringArray(v.([]string)) }, + BytesArray: func(v any) NormalValue { return NewNormalBytesArray(v.([][]byte)) }, + TimeArray: func(v any) NormalValue { return NewNormalTimeArray(v.([]time.Time)) }, + DocumentArray: func(v any) NormalValue { return NewNormalDocumentArray(v.([]*Document)) }, + + NillableBoolArray: func(v any) NormalValue { + return NewNormalNillableBoolArray(v.([]immutable.Option[bool])) + }, + NillableIntArray: func(v any) NormalValue { + return NewNormalNillableIntArray(v.([]immutable.Option[int64])) + }, + NillableFloatArray: func(v any) NormalValue { + return NewNormalNillableFloatArray(v.([]immutable.Option[float64])) + }, + NillableStringArray: func(v any) NormalValue { + return NewNormalNillableStringArray(v.([]immutable.Option[string])) + }, + NillableBytesArray: func(v any) NormalValue { + return NewNormalNillableBytesArray(v.([]immutable.Option[[]byte])) + }, + NillableTimeArray: func(v any) NormalValue { + return NewNormalNillableTimeArray(v.([]immutable.Option[time.Time])) + }, + NillableDocumentArray: func(v any) NormalValue { + return NewNormalNillableDocumentArray(v.([]immutable.Option[*Document])) + }, + + BoolNillableArray: func(v any) NormalValue { + return NewNormalBoolNillableArray(v.(immutable.Option[[]bool])) + }, + IntNillableArray: func(v any) NormalValue { + return NewNormalIntNillableArray(v.(immutable.Option[[]int64])) + }, + FloatNillableArray: func(v any) NormalValue { + return 
NewNormalFloatNillableArray(v.(immutable.Option[[]float64])) + }, + StringNillableArray: func(v any) NormalValue { + return NewNormalStringNillableArray(v.(immutable.Option[[]string])) + }, + BytesNillableArray: func(v any) NormalValue { + return NewNormalBytesNillableArray(v.(immutable.Option[[][]byte])) + }, + TimeNillableArray: func(v any) NormalValue { + return NewNormalTimeNillableArray(v.(immutable.Option[[]time.Time])) + }, + DocumentNillableArray: func(v any) NormalValue { + return NewNormalDocumentNillableArray(v.(immutable.Option[[]*Document])) + }, + + NillableBoolNillableArray: func(v any) NormalValue { + return NewNormalNillableBoolNillableArray(v.(immutable.Option[[]immutable.Option[bool]])) + }, + NillableIntNillableArray: func(v any) NormalValue { + return NewNormalNillableIntNillableArray(v.(immutable.Option[[]immutable.Option[int64]])) + }, + NillableFloatNillableArray: func(v any) NormalValue { + return NewNormalNillableFloatNillableArray(v.(immutable.Option[[]immutable.Option[float64]])) + }, + NillableStringNillableArray: func(v any) NormalValue { + return NewNormalNillableStringNillableArray(v.(immutable.Option[[]immutable.Option[string]])) + }, + NillableBytesNillableArray: func(v any) NormalValue { + return NewNormalNillableBytesNillableArray(v.(immutable.Option[[]immutable.Option[[]byte]])) + }, + NillableTimeNillableArray: func(v any) NormalValue { + return NewNormalNillableTimeNillableArray(v.(immutable.Option[[]immutable.Option[time.Time]])) + }, + NillableDocumentNillableArray: func(v any) NormalValue { + return NewNormalNillableDocumentNillableArray(v.(immutable.Option[[]immutable.Option[*Document]])) + }, + } + + tests := []struct { + nType nType + input any + isNillable bool + isNil bool + isArray bool + }{ + { + nType: BoolType, + input: true, + }, + { + nType: IntType, + input: int64(1), + }, + { + nType: FloatType, + input: float64(1), + }, + { + nType: StringType, + input: "test", + }, + { + nType: BytesType, + input: []byte{1, 2, 3}, + }, + { + nType: TimeType, + input: time.Now(), + }, + { + nType: DocumentType, + input: &Document{}, + }, + { + nType: NillableBoolType, + input: immutable.Some(true), + isNillable: true, + }, + { + nType: NillableBoolType, + input: immutable.None[bool](), + isNil: true, + isNillable: true, + }, + { + nType: NillableIntType, + input: immutable.Some(int64(1)), + isNillable: true, + }, + { + nType: NillableIntType, + input: immutable.None[int64](), + isNil: true, + isNillable: true, + }, + { + nType: NillableFloatType, + input: immutable.Some(float64(1)), + isNillable: true, + }, + { + nType: NillableFloatType, + input: immutable.None[float64](), + isNil: true, + isNillable: true, + }, + { + nType: NillableStringType, + input: immutable.Some("test"), + isNillable: true, + }, + { + nType: NillableStringType, + input: immutable.None[string](), + isNil: true, + isNillable: true, + }, + { + nType: NillableBytesType, + input: immutable.Some([]byte{1, 2, 3}), + isNillable: true, + }, + { + nType: NillableBytesType, + input: immutable.None[[]byte](), + isNil: true, + isNillable: true, + }, + { + nType: NillableTimeType, + input: immutable.Some(time.Now()), + isNillable: true, + }, + { + nType: NillableTimeType, + input: immutable.None[time.Time](), + isNil: true, + isNillable: true, + }, + { + nType: NillableDocumentType, + input: immutable.Some(&Document{}), + isNillable: true, + }, + { + nType: NillableDocumentType, + input: immutable.None[*Document](), + isNil: true, + isNillable: true, + }, + { + nType: BoolArray, + input: 
[]bool{true, false}, + isArray: true, + }, + { + nType: IntArray, + input: []int64{1, 2, 3}, + isArray: true, + }, + { + nType: FloatArray, + input: []float64{1, 2, 3}, + isArray: true, + }, + { + nType: StringArray, + input: []string{"test", "test2"}, + isArray: true, + }, + { + nType: BytesArray, + input: [][]byte{{1, 2, 3}, {4, 5, 6}}, + isArray: true, + }, + { + nType: TimeArray, + input: []time.Time{time.Now(), time.Now()}, + isArray: true, + }, + { + nType: DocumentArray, + input: []*Document{{}, {}}, + isArray: true, + }, + { + nType: NillableBoolArray, + input: []immutable.Option[bool]{immutable.Some(true)}, + isArray: true, + }, + { + nType: NillableIntArray, + input: []immutable.Option[int64]{immutable.Some(int64(1))}, + isArray: true, + }, + { + nType: NillableFloatArray, + input: []immutable.Option[float64]{immutable.Some(float64(1))}, + isArray: true, + }, + { + nType: NillableStringArray, + input: []immutable.Option[string]{immutable.Some("test")}, + isArray: true, + }, + { + nType: NillableBytesArray, + input: []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}, + isArray: true, + }, + { + nType: NillableTimeArray, + input: []immutable.Option[time.Time]{immutable.Some(time.Now())}, + isArray: true, + }, + { + nType: NillableDocumentArray, + input: []immutable.Option[*Document]{immutable.Some(&Document{})}, + isArray: true, + }, + { + nType: BoolNillableArray, + input: immutable.Some([]bool{true, false}), + isNillable: true, + isArray: true, + }, + { + nType: BoolNillableArray, + input: immutable.None[[]bool](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: IntNillableArray, + input: immutable.Some([]int64{1, 2, 3}), + isNillable: true, + isArray: true, + }, + { + nType: IntNillableArray, + input: immutable.None[[]int64](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: FloatNillableArray, + input: immutable.Some([]float64{1, 2, 3}), + isNillable: true, + isArray: true, + }, + { + nType: FloatNillableArray, + input: immutable.None[[]float64](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: StringNillableArray, + input: immutable.Some([]string{"test", "test2"}), + isNillable: true, + isArray: true, + }, + { + nType: StringNillableArray, + input: immutable.None[[]string](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: BytesNillableArray, + input: immutable.Some([][]byte{{1, 2, 3}, {4, 5, 6}}), + isNillable: true, + isArray: true, + }, + { + nType: BytesNillableArray, + input: immutable.None[[][]byte](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: TimeNillableArray, + input: immutable.Some([]time.Time{time.Now(), time.Now()}), + isNillable: true, + isArray: true, + }, + { + nType: TimeNillableArray, + input: immutable.None[[]time.Time](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: DocumentNillableArray, + input: immutable.Some([]*Document{{}, {}}), + isNillable: true, + isArray: true, + }, + { + nType: DocumentNillableArray, + input: immutable.None[[]*Document](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableBoolNillableArray, + input: immutable.Some([]immutable.Option[bool]{immutable.Some(true)}), + isNillable: true, + isArray: true, + }, + { + nType: NillableBoolNillableArray, + input: immutable.None[[]immutable.Option[bool]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableIntNillableArray, + input: 
immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))}), + isNillable: true, + isArray: true, + }, + { + nType: NillableIntNillableArray, + input: immutable.None[[]immutable.Option[int64]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableFloatNillableArray, + input: immutable.Some([]immutable.Option[float64]{immutable.Some(float64(1))}), + isNillable: true, + isArray: true, + }, + { + nType: NillableFloatNillableArray, + input: immutable.None[[]immutable.Option[float64]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableStringNillableArray, + input: immutable.Some([]immutable.Option[string]{immutable.Some("test")}), + isNillable: true, + isArray: true, + }, + { + nType: NillableStringNillableArray, + input: immutable.None[[]immutable.Option[string]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableBytesNillableArray, + input: immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + isNillable: true, + isArray: true, + }, + { + nType: NillableBytesNillableArray, + input: immutable.None[[]immutable.Option[[]byte]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableTimeNillableArray, + input: immutable.Some([]immutable.Option[time.Time]{immutable.Some(time.Now())}), + isNillable: true, + isArray: true, + }, + { + nType: NillableTimeNillableArray, + input: immutable.None[[]immutable.Option[time.Time]](), + isNillable: true, + isNil: true, + isArray: true, + }, + { + nType: NillableDocumentNillableArray, + input: immutable.Some([]immutable.Option[*Document]{immutable.Some(&Document{})}), + isNillable: true, + isArray: true, + }, + } + + for _, tt := range tests { + tStr := string(tt.nType) + t.Run(tStr, func(t *testing.T) { + actual, err := NewNormalValue(tt.input) + require.NoError(t, err) + + for nType, typeAssertFunc := range typeAssertMap { + val, ok := typeAssertFunc(actual) + if nType == tt.nType { + assert.True(t, ok, tStr+"() should return true") + assert.Equal(t, tt.input, val, tStr+"() returned unexpected value") + newVal := newMap[nType](val) + assert.Equal(t, actual, newVal, "New"+tStr+"() returned unexpected NormalValue") + assert.Equal(t, extractValue(tt.input), actual.Unwrap(), + "Unwrap() returned unexpected value for "+tStr) + } else { + assert.False(t, ok, string(nType)+"() should return false for "+tStr) + } + } + + if tt.isNillable { + assert.True(t, actual.IsNillable(), "IsNillable() should return true for "+tStr) + } else { + assert.False(t, actual.IsNillable(), "IsNillable() should return false for "+tStr) + } + + if tt.isNil { + assert.True(t, actual.IsNil(), "IsNil() should return true for "+tStr) + } else { + assert.False(t, actual.IsNil(), "IsNil() should return false for "+tStr) + } + + if tt.isArray { + assert.True(t, actual.IsArray(), "IsArray() should return true for "+tStr) + } else { + assert.False(t, actual.IsArray(), "IsArray() should return false for "+tStr) + } + }) + } +} + +func TestNormalValue_InUnknownType_ReturnError(t *testing.T) { + _, err := NewNormalValue(struct{ name string }{}) + require.ErrorContains(t, err, errCanNotNormalizeValue) +} + +func TestNormalValue_NewNormalValueFromAnyArray(t *testing.T) { + now := time.Now() + doc1 := &Document{} + doc2 := &Document{} + + tests := []struct { + name string + input []any + expected NormalValue + err string + }{ + { + name: "nil input", + input: nil, + err: errCanNotNormalizeValue, + }, + { + name: "unknown element type", + input: []any{struct{ 
name string }{}}, + err: errCanNotNormalizeValue, + }, + { + name: "mixed elements type", + input: []any{1, "test", true}, + err: errCanNotNormalizeValue, + }, + { + name: "bool elements", + input: []any{true, false}, + expected: NewNormalBoolArray([]bool{true, false}), + }, + { + name: "int elements", + input: []any{int64(1), int64(2)}, + expected: NewNormalIntArray([]int64{1, 2}), + }, + { + name: "float elements", + input: []any{float64(1), float64(2)}, + expected: NewNormalFloatArray([]float64{1, 2}), + }, + { + name: "string elements", + input: []any{"test", "test2"}, + expected: NewNormalStringArray([]string{"test", "test2"}), + }, + { + name: "bytes elements", + input: []any{[]byte{1, 2, 3}, []byte{4, 5, 6}}, + expected: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + }, + { + name: "time elements", + input: []any{now, now}, + expected: NewNormalTimeArray([]time.Time{now, now}), + }, + { + name: "document elements", + input: []any{doc1, doc2}, + expected: NewNormalDocumentArray([]*Document{doc1, doc2}), + }, + { + name: "bool and nil elements", + input: []any{true, nil, false}, + expected: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool](), immutable.Some(false)}, + ), + }, + { + name: "int and nil elements", + input: []any{1, nil, 2}, + expected: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64](), immutable.Some(int64(2))}, + ), + }, + { + name: "float and nil elements", + input: []any{1.0, nil, 2.0}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64](), immutable.Some(2.0)}, + ), + }, + { + name: "string and nil elements", + input: []any{"test", nil, "test2"}, + expected: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string](), immutable.Some("test2")}, + ), + }, + { + name: "bytes and nil elements", + input: []any{[]byte{1, 2, 3}, nil, []byte{4, 5, 6}}, + expected: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{ + immutable.Some([]byte{1, 2, 3}), + immutable.None[[]byte](), + immutable.Some([]byte{4, 5, 6}), + }, + ), + }, + { + name: "time and nil elements", + input: []any{now, nil, now}, + expected: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time](), immutable.Some(now)}, + ), + }, + { + name: "document and nil elements", + input: []any{doc1, nil, doc2}, + expected: NewNormalNillableDocumentArray( + []immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document](), immutable.Some(doc2)}, + ), + }, + { + name: "mixed int elements", + input: []any{int8(1), int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), uint(10)}, + expected: NewNormalIntArray([]int64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}), + }, + { + name: "mixed float elements", + input: []any{float32(1.5), float64(2.2)}, + expected: NewNormalFloatArray([]float64{1.5, 2.2}), + }, + { + name: "mixed number elements", + input: []any{int8(1), int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), uint(10), float32(1.5), float64(2.2)}, + expected: NewNormalFloatArray([]float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1.5, 2.2}), + }, + { + name: "mixed int and nil elements", + input: []any{int8(1), nil, int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), nil, uint(10)}, + expected: NewNormalNillableIntArray( + 
[]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64](), immutable.Some(int64(2)), + immutable.Some(int64(3)), immutable.Some(int64(4)), immutable.Some(int64(5)), immutable.Some(int64(6)), + immutable.Some(int64(7)), immutable.Some(int64(8)), immutable.Some(int64(9)), immutable.None[int64](), + immutable.Some(int64(10))}, + ), + }, + { + name: "mixed float and nil elements", + input: []any{float32(1.5), nil, float64(2.2)}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.5), immutable.None[float64](), immutable.Some(2.2)}, + ), + }, + { + name: "mixed number and nil elements", + input: []any{int8(1), nil, int16(2), int32(3), int64(4), int(5), uint8(6), uint16(7), uint32(8), + uint64(9), nil, uint(10), float32(1.5), nil, float64(2.2)}, + expected: NewNormalNillableFloatArray( + []immutable.Option[float64]{ + immutable.Some(1.0), immutable.None[float64](), immutable.Some(2.0), immutable.Some(3.0), + immutable.Some(4.0), immutable.Some(5.0), immutable.Some(6.0), immutable.Some(7.0), + immutable.Some(8.0), immutable.Some(9.0), immutable.None[float64](), immutable.Some(10.0), + immutable.Some(1.5), immutable.None[float64](), immutable.Some(2.2)}, + ), + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual, err := NewNormalValue(tt.input) + if tt.err != "" { + require.ErrorContains(t, err, tt.err) + return + } + + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestNormalValue_NewNormalInt(t *testing.T) { + i64 := int64(2) + v := NewNormalInt(i64) + getInt := func(v NormalValue) int64 { i, _ := v.Int(); return i } + + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(float32(2.5)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(float64(2.5)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int8(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int16(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int32(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(int(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint8(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint16(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint32(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint64(2)) + assert.Equal(t, i64, getInt(v)) + + v = NewNormalInt(uint(2)) + assert.Equal(t, i64, getInt(v)) +} + +func TestNormalValue_NewNormalFloat(t *testing.T) { + f64Frac := float64(2.5) + f64 := float64(2) + + getFloat := func(v NormalValue) float64 { f, _ := v.Float(); return f } + + v := NewNormalFloat(f64Frac) + assert.Equal(t, f64Frac, getFloat(v)) + + v = NewNormalFloat(float32(2.5)) + assert.Equal(t, f64Frac, getFloat(v)) + + v = NewNormalFloat(int8(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int16(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int32(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int64(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(int(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint8(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint16(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint32(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint64(2)) + assert.Equal(t, f64, getFloat(v)) + + v = NewNormalFloat(uint(2)) + assert.Equal(t, f64, getFloat(v)) +} + +func TestNormalValue_NewNormalString(t *testing.T) { + strInput := "str" + + getString := func(v NormalValue) 
string { s, _ := v.String(); return s } + + v := NewNormalString(strInput) + assert.Equal(t, strInput, getString(v)) + + v = NewNormalString([]byte{'s', 't', 'r'}) + assert.Equal(t, strInput, getString(v)) +} + +func TestNormalValue_NewNormalBytes(t *testing.T) { + bytesInput := []byte("str") + + getBytes := func(v NormalValue) []byte { b, _ := v.Bytes(); return b } + + v := NewNormalBytes(bytesInput) + assert.Equal(t, bytesInput, getBytes(v)) + + v = NewNormalBytes("str") + assert.Equal(t, bytesInput, getBytes(v)) +} + +func TestNormalValue_NewNormalIntArray(t *testing.T) { + i64Input := []int64{2} + + getIntArray := func(v NormalValue) []int64 { i, _ := v.IntArray(); return i } + + v := NewNormalIntArray(i64Input) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]float32{2.5}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int8{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int16{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int32{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int64{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]int{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint8{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint16{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint32{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint64{2}) + assert.Equal(t, i64Input, getIntArray(v)) + + v = NewNormalIntArray([]uint{2}) + assert.Equal(t, i64Input, getIntArray(v)) +} + +func TestNormalValue_NewNormalFloatArray(t *testing.T) { + f64InputFrac := []float64{2.5} + f64Input := []float64{2.0} + + getFloatArray := func(v NormalValue) []float64 { f, _ := v.FloatArray(); return f } + + v := NewNormalFloatArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getFloatArray(v)) + + v = NewNormalFloatArray([]float32{2.5}) + assert.Equal(t, f64InputFrac, getFloatArray(v)) + + v = NewNormalFloatArray([]int8{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int16{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int32{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int64{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]int{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint8{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint16{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint32{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint64{2}) + assert.Equal(t, f64Input, getFloatArray(v)) + + v = NewNormalFloatArray([]uint{2}) + assert.Equal(t, f64Input, getFloatArray(v)) +} + +func TestNormalValue_NewNormalStringArray(t *testing.T) { + strInput := []string{"str"} + + getStringArray := func(v NormalValue) []string { s, _ := v.StringArray(); return s } + + v := NewNormalStringArray(strInput) + assert.Equal(t, strInput, getStringArray(v)) + + v = NewNormalStringArray([][]byte{{'s', 't', 'r'}}) + assert.Equal(t, strInput, getStringArray(v)) +} + +func TestNormalValue_NewNormalBytesArray(t *testing.T) { + bytesInput := [][]byte{[]byte("str")} + + getBytesArray := func(v NormalValue) [][]byte { b, _ := v.BytesArray(); return b } + + v := NewNormalBytesArray(bytesInput) + 
assert.Equal(t, bytesInput, getBytesArray(v)) + + v = NewNormalBytesArray([]string{"str"}) + assert.Equal(t, bytesInput, getBytesArray(v)) +} + +func TestNormalValue_NewNormalNillableFloatArray(t *testing.T) { + f64InputFrac := []immutable.Option[float64]{immutable.Some(2.5)} + f64Input := []immutable.Option[float64]{immutable.Some(2.0)} + + getNillableFloatArray := func(v NormalValue) []immutable.Option[float64] { f, _ := v.NillableFloatArray(); return f } + + v := NewNormalNillableFloatArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[float32]{immutable.Some[float32](2.5)}) + assert.Equal(t, f64InputFrac, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int8]{immutable.Some[int8](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int16]{immutable.Some[int16](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int32]{immutable.Some[int32](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int64]{immutable.Some[int64](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[int]{immutable.Some[int](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint8]{immutable.Some[uint8](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint16]{immutable.Some[uint16](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint32]{immutable.Some[uint32](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint64]{immutable.Some[uint64](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) + + v = NewNormalNillableFloatArray([]immutable.Option[uint]{immutable.Some[uint](2)}) + assert.Equal(t, f64Input, getNillableFloatArray(v)) +} + +func TestNormalValue_NewNormalNillableIntArray(t *testing.T) { + i64Input := []immutable.Option[int64]{immutable.Some[int64](2)} + + getNillableIntArray := func(v NormalValue) []immutable.Option[int64] { i, _ := v.NillableIntArray(); return i } + + v := NewNormalNillableIntArray(i64Input) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[float32]{immutable.Some[float32](2.5)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[float64]{immutable.Some[float64](2.5)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int8]{immutable.Some[int8](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int16]{immutable.Some[int16](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int32]{immutable.Some[int32](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[int]{immutable.Some[int](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint8]{immutable.Some[uint8](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = 
NewNormalNillableIntArray([]immutable.Option[uint16]{immutable.Some[uint16](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint32]{immutable.Some[uint32](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint64]{immutable.Some[uint64](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) + + v = NewNormalNillableIntArray([]immutable.Option[uint]{immutable.Some[uint](2)}) + assert.Equal(t, i64Input, getNillableIntArray(v)) +} + +func TestNormalValue_NewNormalNillableStringArray(t *testing.T) { + strInput := []immutable.Option[string]{immutable.Some("str")} + + getNillableStringArray := func(v NormalValue) []immutable.Option[string] { s, _ := v.NillableStringArray(); return s } + + v := NewNormalNillableStringArray(strInput) + assert.Equal(t, strInput, getNillableStringArray(v)) + + v = NewNormalNillableStringArray([]immutable.Option[[]byte]{immutable.Some[[]byte]([]byte{'s', 't', 'r'})}) + assert.Equal(t, strInput, getNillableStringArray(v)) +} + +func TestNormalValue_NewNormalNillableBytesArray(t *testing.T) { + bytesInput := []immutable.Option[[]byte]{immutable.Some[[]byte]([]byte("str"))} + + getNillableBytesArray := func(v NormalValue) []immutable.Option[[]byte] { b, _ := v.NillableBytesArray(); return b } + + v := NewNormalNillableBytesArray(bytesInput) + assert.Equal(t, bytesInput, getNillableBytesArray(v)) + + v = NewNormalNillableBytesArray([]immutable.Option[string]{immutable.Some("str")}) + assert.Equal(t, bytesInput, getNillableBytesArray(v)) +} + +func TestNormalValue_NewNormalIntArrayNillable(t *testing.T) { + i64Input := immutable.Some([]int64{2}) + + getIntNillableArray := func(v NormalValue) immutable.Option[[]int64] { i, _ := v.IntNillableArray(); return i } + + v := NewNormalIntNillableArray(i64Input) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]float32{2.5})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]float64{2.5})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int8{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int16{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int32{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]int{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint8{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint16{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint32{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint64{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) + + v = NewNormalIntNillableArray(immutable.Some([]uint{2})) + assert.Equal(t, i64Input, getIntNillableArray(v)) +} + +func TestNormalValue_NewNormalFloatNillableArray(t *testing.T) { + f64InputFrac := immutable.Some([]float64{2.5}) + f64Input := immutable.Some([]float64{2.0}) + + getFloatNillableArray := func(v NormalValue) immutable.Option[[]float64] { f, _ := v.FloatNillableArray(); return f } + + v := NewNormalFloatNillableArray(f64InputFrac) + assert.Equal(t, 
f64InputFrac, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]float32{2.5})) + assert.Equal(t, f64InputFrac, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int8{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int16{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int32{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int64{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]int{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint8{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint16{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint32{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint64{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) + + v = NewNormalFloatNillableArray(immutable.Some([]uint{2})) + assert.Equal(t, f64Input, getFloatNillableArray(v)) +} + +func TestNormalValue_NewNormalStringNillableArray(t *testing.T) { + strInput := immutable.Some([]string{"str"}) + + getStringNillableArray := func(v NormalValue) immutable.Option[[]string] { s, _ := v.StringNillableArray(); return s } + + v := NewNormalStringNillableArray(strInput) + assert.Equal(t, strInput, getStringNillableArray(v)) + + v = NewNormalStringNillableArray(immutable.Some([][]byte{{'s', 't', 'r'}})) + assert.Equal(t, strInput, getStringNillableArray(v)) +} + +func TestNormalValue_NewNormalBytesNillableArray(t *testing.T) { + bytesInput := immutable.Some([][]byte{{'s', 't', 'r'}}) + + getBytesNillableArray := func(v NormalValue) immutable.Option[[][]byte] { s, _ := v.BytesNillableArray(); return s } + + v := NewNormalBytesNillableArray(immutable.Some([]string{"str"})) + assert.Equal(t, bytesInput, getBytesNillableArray(v)) + + v = NewNormalBytesNillableArray(bytesInput) + assert.Equal(t, bytesInput, getBytesNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableIntNillableArray(t *testing.T) { + i64Input := immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))}) + + getNillableIntNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[int64]] { + i, _ := v.NillableIntNillableArray() + return i + } + + v := NewNormalNillableIntNillableArray(i64Input) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[float32]{immutable.Some(float32(2.5))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[float64]{immutable.Some(2.5)})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int8]{immutable.Some(int8(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int16]{immutable.Some(int16(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int32]{immutable.Some(int32(2))})) + assert.Equal(t, i64Input, 
getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[int]{immutable.Some(int(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint8]{immutable.Some(uint8(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint16]{immutable.Some(uint16(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint32]{immutable.Some(uint32(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint64]{immutable.Some(uint64(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) + + v = NewNormalNillableIntNillableArray(immutable.Some([]immutable.Option[uint]{immutable.Some(uint(2))})) + assert.Equal(t, i64Input, getNillableIntNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableFloatNillableArray(t *testing.T) { + f64InputFrac := immutable.Some([]immutable.Option[float64]{immutable.Some(2.5)}) + f64Input := immutable.Some([]immutable.Option[float64]{immutable.Some(2.0)}) + + getNillableFloatNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[float64]] { + f, _ := v.NillableFloatNillableArray() + return f + } + + v := NewNormalNillableFloatNillableArray(f64InputFrac) + assert.Equal(t, f64InputFrac, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[float32]{immutable.Some(float32(2.5))})) + assert.Equal(t, f64InputFrac, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int8]{immutable.Some(int8(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int16]{immutable.Some(int16(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int32]{immutable.Some(int32(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[int]{immutable.Some(2)})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint8]{immutable.Some(uint8(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint16]{immutable.Some(uint16(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint32]{immutable.Some(uint32(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint64]{immutable.Some(uint64(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) + + v = NewNormalNillableFloatNillableArray(immutable.Some([]immutable.Option[uint]{immutable.Some(uint(2))})) + assert.Equal(t, f64Input, getNillableFloatNillableArray(v)) +} + +func 
TestNormalValue_NewNormalNillableStringNillableArray(t *testing.T) { + strInput := immutable.Some([]immutable.Option[string]{immutable.Some("str")}) + + getNillableStringNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[string]] { + s, _ := v.NillableStringNillableArray() + return s + } + + v := NewNormalNillableStringNillableArray(strInput) + assert.Equal(t, strInput, getNillableStringNillableArray(v)) + + bytesInput := immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{'s', 't', 'r'})}) + v = NewNormalNillableStringNillableArray(bytesInput) + assert.Equal(t, strInput, getNillableStringNillableArray(v)) +} + +func TestNormalValue_NewNormalNillableBytesNillableArray(t *testing.T) { + bytesInput := immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{'s', 't', 'r'})}) + + getNillableBytesNillableArray := func(v NormalValue) immutable.Option[[]immutable.Option[[]byte]] { + s, _ := v.NillableBytesNillableArray() + return s + } + + v := NewNormalNillableBytesNillableArray(bytesInput) + assert.Equal(t, bytesInput, getNillableBytesNillableArray(v)) + + strInput := immutable.Some([]immutable.Option[string]{immutable.Some("str")}) + v = NewNormalNillableBytesNillableArray(strInput) + assert.Equal(t, bytesInput, getNillableBytesNillableArray(v)) +} + +func TestNormalValue_NewNormalNil(t *testing.T) { + fieldKinds := []FieldKind{} + for _, kind := range FieldKindStringToEnumMapping { + fieldKinds = append(fieldKinds, kind) + } + fieldKinds = append(fieldKinds, ObjectKind("Object")) + fieldKinds = append(fieldKinds, ObjectArrayKind("ObjectArr")) + + for _, kind := range fieldKinds { + if kind.IsNillable() { + v, err := NewNormalNil(kind) + require.NoError(t, err) + + assert.True(t, v.IsNil()) + } else { + _, err := NewNormalNil(kind) + require.Error(t, err) + } + } +} + +func TestNormalValue_ToArrayOfNormalValues(t *testing.T) { + now := time.Now() + doc1 := &Document{} + doc2 := &Document{} + + normalNil, err := NewNormalNil(FieldKind_NILLABLE_INT) + require.NoError(t, err) + + tests := []struct { + name string + input NormalValue + expected []NormalValue + err string + }{ + { + name: "nil", + input: normalNil, + }, + { + name: "not array", + input: NewNormalInt(1), + err: errCanNotTurnNormalValueIntoArray, + }, + { + name: "bool elements", + input: NewNormalBoolArray([]bool{true, false}), + expected: []NormalValue{NewNormalBool(true), NewNormalBool(false)}, + }, + { + name: "int elements", + input: NewNormalIntArray([]int64{1, 2}), + expected: []NormalValue{NewNormalInt(1), NewNormalInt(2)}, + }, + { + name: "float elements", + input: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: []NormalValue{NewNormalFloat(1.0), NewNormalFloat(2.0)}, + }, + { + name: "string elements", + input: NewNormalStringArray([]string{"test", "test2"}), + expected: []NormalValue{NewNormalString("test"), NewNormalString("test2")}, + }, + { + name: "bytes elements", + input: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: []NormalValue{NewNormalBytes([]byte{1, 2, 3}), NewNormalBytes([]byte{4, 5, 6})}, + }, + { + name: "time elements", + input: NewNormalTimeArray([]time.Time{now, now}), + expected: []NormalValue{NewNormalTime(now), NewNormalTime(now)}, + }, + { + name: "document elements", + input: NewNormalDocumentArray([]*Document{doc1, doc2}), + expected: []NormalValue{NewNormalDocument(doc1), NewNormalDocument(doc2)}, + }, + { + name: "nillable bool elements", + input: NewNormalNillableBoolArray([]immutable.Option[bool]{ + immutable.Some(true), 
immutable.Some(false)}), + expected: []NormalValue{ + NewNormalNillableBool(immutable.Some(true)), + NewNormalNillableBool(immutable.Some(false)), + }, + }, + { + name: "nillable int elements", + input: NewNormalNillableIntArray([]immutable.Option[int64]{ + immutable.Some(int64(1)), immutable.Some(int64(2))}), + expected: []NormalValue{ + NewNormalNillableInt(immutable.Some(int64(1))), + NewNormalNillableInt(immutable.Some(int64(2))), + }, + }, + { + name: "nillable float elements", + input: NewNormalNillableFloatArray([]immutable.Option[float64]{ + immutable.Some(1.0), immutable.Some(2.0)}), + expected: []NormalValue{ + NewNormalNillableFloat(immutable.Some(1.0)), + NewNormalNillableFloat(immutable.Some(2.0)), + }, + }, + { + name: "nillable string elements", + input: NewNormalNillableStringArray([]immutable.Option[string]{ + immutable.Some("test"), immutable.Some("test2")}), + expected: []NormalValue{ + NewNormalNillableString(immutable.Some("test")), + NewNormalNillableString(immutable.Some("test2")), + }, + }, + { + name: "nillable bytes elements", + input: NewNormalNillableBytesArray([]immutable.Option[[]byte]{ + immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + expected: []NormalValue{ + NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + NewNormalNillableBytes(immutable.Some([]byte{4, 5, 6})), + }, + }, + { + name: "nillable time elements", + input: NewNormalNillableTimeArray([]immutable.Option[time.Time]{ + immutable.Some(now), immutable.Some(now)}), + expected: []NormalValue{ + NewNormalNillableTime(immutable.Some(now)), + NewNormalNillableTime(immutable.Some(now)), + }, + }, + { + name: "nillable document elements", + input: NewNormalNillableDocumentArray([]immutable.Option[*Document]{ + immutable.Some(doc1), immutable.Some(doc2)}), + expected: []NormalValue{ + NewNormalNillableDocument(immutable.Some(doc1)), + NewNormalNillableDocument(immutable.Some(doc2)), + }, + }, + { + name: "nillable array of bool elements", + input: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: []NormalValue{NewNormalBool(true)}, + }, + { + name: "nillable array of int elements", + input: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: []NormalValue{NewNormalInt(1)}, + }, + { + name: "nillable array of float elements", + input: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: []NormalValue{NewNormalFloat(1.0)}, + }, + { + name: "nillable array of string elements", + input: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: []NormalValue{NewNormalString("test")}, + }, + { + name: "nillable array of bytes elements", + input: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: []NormalValue{NewNormalBytes([]byte{1, 2, 3})}, + }, + { + name: "nillable array of time elements", + input: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: []NormalValue{NewNormalTime(now)}, + }, + { + name: "nillable array of document elements", + input: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: []NormalValue{NewNormalDocument(doc1)}, + }, + { + name: "nillable array of nillable bool elements", + input: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: []NormalValue{NewNormalNillableBool(immutable.Some(true))}, + }, + { + name: "nillable array of nillable int elements", + input: NewNormalNillableIntNillableArray( + 
immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: []NormalValue{NewNormalNillableInt(immutable.Some(int64(1)))}, + }, + { + name: "nillable array of nillable float elements", + input: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: []NormalValue{NewNormalNillableFloat(immutable.Some(1.0))}, + }, + { + name: "nillable array of nillable string elements", + input: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: []NormalValue{NewNormalNillableString(immutable.Some("test"))}, + }, + { + name: "nillable array of nillable bytes elements", + input: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + expected: []NormalValue{NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3}))}, + }, + { + name: "nillable array of nillable time elements", + input: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + expected: []NormalValue{NewNormalNillableTime(immutable.Some(now))}, + }, + { + name: "nillable array of nillable document elements", + input: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: []NormalValue{NewNormalNillableDocument(immutable.Some(doc1))}, + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual, err := ToArrayOfNormalValues(tt.input) + if tt.err != "" { + require.ErrorContains(t, err, tt.err) + return + } + + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/client/normal_void.go b/client/normal_void.go new file mode 100644 index 0000000000..e3e29b5094 --- /dev/null +++ b/client/normal_void.go @@ -0,0 +1,205 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package client + +import ( + "time" + + "github.com/sourcenetwork/immutable" +) + +// NormalVoid is a default implementation of NormalValue to be embedded in other types. +// It can also be used to realize the Null Object pattern: https://en.wikipedia.org/wiki/Null_object_pattern.
+type NormalVoid struct{} + +func (NormalVoid) Unwrap() any { + return nil +} + +func (NormalVoid) IsNil() bool { + return false +} + +func (NormalVoid) IsNillable() bool { + return false +} + +func (NormalVoid) Bool() (bool, bool) { + return false, false +} + +func (NormalVoid) Int() (int64, bool) { + return 0, false +} + +func (NormalVoid) Float() (float64, bool) { + return 0, false +} + +func (NormalVoid) String() (string, bool) { + return "", false +} + +func (NormalVoid) Bytes() ([]byte, bool) { + return nil, false +} + +func (NormalVoid) Time() (time.Time, bool) { + return time.Time{}, false +} + +func (NormalVoid) Document() (*Document, bool) { + return nil, false +} + +func (NormalVoid) NillableBool() (immutable.Option[bool], bool) { + return immutable.None[bool](), false +} + +func (NormalVoid) NillableInt() (immutable.Option[int64], bool) { + return immutable.None[int64](), false +} + +func (NormalVoid) NillableFloat() (immutable.Option[float64], bool) { + return immutable.None[float64](), false +} + +func (NormalVoid) NillableString() (immutable.Option[string], bool) { + return immutable.None[string](), false +} + +func (NormalVoid) NillableBytes() (immutable.Option[[]byte], bool) { + return immutable.None[[]byte](), false +} + +func (NormalVoid) NillableTime() (immutable.Option[time.Time], bool) { + return immutable.None[time.Time](), false +} + +func (NormalVoid) NillableDocument() (immutable.Option[*Document], bool) { + return immutable.None[*Document](), false +} + +func (NormalVoid) IsArray() bool { + return false +} + +func (NormalVoid) BoolArray() ([]bool, bool) { + return nil, false +} + +func (NormalVoid) IntArray() ([]int64, bool) { + return nil, false +} + +func (NormalVoid) FloatArray() ([]float64, bool) { + return nil, false +} + +func (NormalVoid) StringArray() ([]string, bool) { + return nil, false +} + +func (NormalVoid) BytesArray() ([][]byte, bool) { + return nil, false +} + +func (NormalVoid) TimeArray() ([]time.Time, bool) { + return nil, false +} + +func (NormalVoid) DocumentArray() ([]*Document, bool) { + return nil, false +} + +func (NormalVoid) NillableBoolArray() ([]immutable.Option[bool], bool) { + return nil, false +} + +func (NormalVoid) NillableIntArray() ([]immutable.Option[int64], bool) { + return nil, false +} + +func (NormalVoid) NillableFloatArray() ([]immutable.Option[float64], bool) { + return nil, false +} + +func (NormalVoid) NillableStringArray() ([]immutable.Option[string], bool) { + return nil, false +} + +func (NormalVoid) NillableBytesArray() ([]immutable.Option[[]byte], bool) { + return nil, false +} + +func (NormalVoid) NillableTimeArray() ([]immutable.Option[time.Time], bool) { + return nil, false +} + +func (NormalVoid) NillableDocumentArray() ([]immutable.Option[*Document], bool) { + return nil, false +} + +func (NormalVoid) BoolNillableArray() (immutable.Option[[]bool], bool) { + return immutable.None[[]bool](), false +} + +func (NormalVoid) IntNillableArray() (immutable.Option[[]int64], bool) { + return immutable.None[[]int64](), false +} + +func (NormalVoid) FloatNillableArray() (immutable.Option[[]float64], bool) { + return immutable.None[[]float64](), false +} + +func (NormalVoid) StringNillableArray() (immutable.Option[[]string], bool) { + return immutable.None[[]string](), false +} + +func (NormalVoid) BytesNillableArray() (immutable.Option[[][]byte], bool) { + return immutable.None[[][]byte](), false +} + +func (NormalVoid) TimeNillableArray() (immutable.Option[[]time.Time], bool) { + return immutable.None[[]time.Time](), 
false +} + +func (NormalVoid) DocumentNillableArray() (immutable.Option[[]*Document], bool) { + return immutable.None[[]*Document](), false +} + +func (NormalVoid) NillableBoolNillableArray() (immutable.Option[[]immutable.Option[bool]], bool) { + return immutable.None[[]immutable.Option[bool]](), false +} + +func (NormalVoid) NillableIntNillableArray() (immutable.Option[[]immutable.Option[int64]], bool) { + return immutable.None[[]immutable.Option[int64]](), false +} + +func (NormalVoid) NillableFloatNillableArray() (immutable.Option[[]immutable.Option[float64]], bool) { + return immutable.None[[]immutable.Option[float64]](), false +} + +func (NormalVoid) NillableStringNillableArray() (immutable.Option[[]immutable.Option[string]], bool) { + return immutable.None[[]immutable.Option[string]](), false +} + +func (NormalVoid) NillableBytesNillableArray() (immutable.Option[[]immutable.Option[[]byte]], bool) { + return immutable.None[[]immutable.Option[[]byte]](), false +} + +func (NormalVoid) NillableTimeNillableArray() (immutable.Option[[]immutable.Option[time.Time]], bool) { + return immutable.None[[]immutable.Option[time.Time]](), false +} + +func (NormalVoid) NillableDocumentNillableArray() (immutable.Option[[]immutable.Option[*Document]], bool) { + return immutable.None[[]immutable.Option[*Document]](), false +} diff --git a/client/schema_field_description.go b/client/schema_field_description.go index 3fa2c6bc1c..7f945e3ab8 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -21,7 +21,7 @@ type FieldKind interface { // String returns the string representation of this FieldKind. String() string - // Underlying returns the unterlying Kind as a string. + // Underlying returns the underlying Kind as a string. // // If this is an array, it will return the element kind, else it will return the same as // [String()]. @@ -69,13 +69,13 @@ type SchemaFieldDescription struct { // ScalarKind represents singular scalar field kinds, such as `Int`. type ScalarKind uint8 -// ScalarArrayKind represnts arrays of simple scalar field kinds, such as `[Int]`. +// ScalarArrayKind represents arrays of simple scalar field kinds, such as `[Int]`. type ScalarArrayKind uint8 -// ObjectKind represents singular objects (foriegn and embedded), such as `User`. +// ObjectKind represents singular objects (foreign and embedded), such as `User`. type ObjectKind string -// ObjectKind represents arrays of objects (foriegn and embedded), such as `[User]`. +// ObjectKind represents arrays of objects (foreign and embedded), such as `[User]`. 
type ObjectArrayKind string var _ FieldKind = ScalarKind(0) diff --git a/client/value.go b/client/value.go index a6719a8479..bc84205cd9 100644 --- a/client/value.go +++ b/client/value.go @@ -17,11 +17,11 @@ import ( type FieldValue struct { t CType - value any + value NormalValue isDirty bool } -func NewFieldValue(t CType, val any) *FieldValue { +func NewFieldValue(t CType, val NormalValue) *FieldValue { return &FieldValue{ t: t, value: val, @@ -30,6 +30,10 @@ func NewFieldValue(t CType, val any) *FieldValue { } func (val FieldValue) Value() any { + return val.value.Unwrap() +} + +func (val FieldValue) NormalValue() NormalValue { return val.value } @@ -38,7 +42,7 @@ func (val FieldValue) Type() CType { } func (val FieldValue) IsDocument() bool { - _, ok := val.value.(*Document) + _, ok := val.value.Document() return ok } @@ -62,17 +66,16 @@ func (val FieldValue) Bytes() ([]byte, error) { } var value any - switch tempVal := val.value.(type) { - case []immutable.Option[string]: - value = convertImmutable(tempVal) - case []immutable.Option[int64]: - value = convertImmutable(tempVal) - case []immutable.Option[float64]: - value = convertImmutable(tempVal) - case []immutable.Option[bool]: - value = convertImmutable(tempVal) - default: - value = val.value + if v, ok := val.value.NillableStringArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableIntArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableFloatArray(); ok { + value = convertImmutable(v) + } else if v, ok := val.value.NillableBoolArray(); ok { + value = convertImmutable(v) + } else { + value = val.value.Unwrap() } return em.Marshal(value) diff --git a/core/encoding.go b/core/encoding.go index 40e74915b8..eab401c7a2 100644 --- a/core/encoding.go +++ b/core/encoding.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/encoding" ) @@ -245,16 +244,18 @@ func DecodeIndexDataStoreKey( i := len(key.Fields) descending := false + var kind client.FieldKind = client.FieldKind_DocID // If the key has more values encoded then fields on the index description, the last // value must be the docID and we treat it as a string. if i < len(indexDesc.Fields) { descending = indexDesc.Fields[i].Descending + kind = fields[i].Kind } else if i > len(indexDesc.Fields) { return IndexDataStoreKey{}, ErrInvalidKey } - var val any - data, val, err = encoding.DecodeFieldValue(data, descending) + var val client.NormalValue + data, val, err = encoding.DecodeFieldValue(data, descending, kind) if err != nil { return IndexDataStoreKey{}, err } @@ -262,34 +263,7 @@ func DecodeIndexDataStoreKey( key.Fields = append(key.Fields, IndexedField{Value: val, Descending: descending}) } - err = normalizeIndexDataStoreKeyValues(&key, fields) - return key, err -} - -// normalizeIndexDataStoreKeyValues converts all field values to standardized -// Defra Go type according to fields description. 
-func normalizeIndexDataStoreKeyValues(key *IndexDataStoreKey, fields []client.FieldDefinition) error { - for i := range key.Fields { - if key.Fields[i].Value == nil { - continue - } - var err error - var val any - if i == len(key.Fields)-1 && len(key.Fields)-len(fields) == 1 { - bytes, ok := key.Fields[i].Value.([]byte) - if !ok { - return client.NewErrUnexpectedType[[]byte](request.DocIDArgName, key.Fields[i].Value) - } - val = string(bytes) - } else { - val, err = NormalizeFieldValue(fields[i], key.Fields[i].Value) - } - if err != nil { - return err - } - key.Fields[i].Value = val - } - return nil + return key, nil } // EncodeIndexDataStoreKey encodes a IndexDataStoreKey to bytes to be stored as a key diff --git a/core/errors.go b/core/errors.go index 440e5778ac..d9ae72e0c4 100644 --- a/core/errors.go +++ b/core/errors.go @@ -17,6 +17,7 @@ import ( const ( errFailedToGetFieldIdOfKey string = "failed to get FieldID of Key" errInvalidFieldIndex string = "invalid field index" + errInvalidFieldValue string = "invalid field value" ) var ( @@ -24,6 +25,7 @@ var ( ErrEmptyKey = errors.New("received empty key string") ErrInvalidKey = errors.New("invalid key string") ErrInvalidFieldIndex = errors.New(errInvalidFieldIndex) + ErrInvalidFieldValue = errors.New(errInvalidFieldValue) ) // NewErrFailedToGetFieldIdOfKey returns the error indicating failure to get FieldID of Key. @@ -35,3 +37,8 @@ func NewErrFailedToGetFieldIdOfKey(inner error) error { func NewErrInvalidFieldIndex(i int) error { return errors.New(errInvalidFieldIndex, errors.NewKV("index", i)) } + +// NewErrInvalidFieldValue returns the error indicating invalid field value. +func NewErrInvalidFieldValue(reason string) error { + return errors.New(errInvalidFieldValue, errors.NewKV("Reason", reason)) +} diff --git a/core/key.go b/core/key.go index 4017d445b0..5c569f310d 100644 --- a/core/key.go +++ b/core/key.go @@ -79,7 +79,7 @@ var _ Key = (*DataStoreKey)(nil) // value of a field in an index. 
type IndexedField struct { // Value is the value of the field in the index - Value any + Value client.NormalValue // Descending is true if the field is sorted in descending order Descending bool } diff --git a/core/key_test.go b/core/key_test.go index 3fa7f41a63..7791075a17 100644 --- a/core/key_test.go +++ b/core/key_test.go @@ -220,26 +220,26 @@ func TestIndexDatastoreKey_Bytes(t *testing.T) { Name: "collection, index and one field", CollectionID: 1, IndexID: 2, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: encodeKey(1, 2, 5, false), }, { Name: "collection, index and two fields", CollectionID: 1, IndexID: 2, - Fields: []IndexedField{{Value: 5}, {Value: 7}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}, {Value: client.NewNormalInt(7)}}, Expected: encodeKey(1, 2, 5, false, 7, false), }, { Name: "no index", CollectionID: 1, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: encoding.EncodeUvarintAscending([]byte{'/'}, 1), }, { Name: "no collection", IndexID: 2, - Fields: []IndexedField{{Value: 5}}, + Fields: []IndexedField{{Value: client.NewNormalInt(5)}}, Expected: []byte{}, }, } @@ -255,12 +255,12 @@ func TestIndexDatastoreKey_Bytes(t *testing.T) { } func TestIndexDatastoreKey_ToString(t *testing.T) { - key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: 5}}) + key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: client.NewNormalInt(5)}}) assert.Equal(t, key.ToString(), string(encodeKey(1, 2, 5, false))) } func TestIndexDatastoreKey_ToDS(t *testing.T) { - key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: 5}}) + key := NewIndexDataStoreKey(1, 2, []IndexedField{{Value: client.NewNormalInt(5)}}) assert.Equal(t, key.ToDS(), ds.NewKey(string(encodeKey(1, 2, 5, false)))) } @@ -288,7 +288,7 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { Fields: []client.IndexedFieldDescription{{}}, }, inputBytes: encodeKey(colID, indexID, 5, false), - expectedFields: []IndexedField{{Value: int64(5)}}, + expectedFields: []IndexedField{{Value: client.NewNormalInt(5)}}, }, { name: "two fields (one descending)", @@ -296,8 +296,11 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { ID: indexID, Fields: []client.IndexedFieldDescription{{}, {Descending: true}}, }, - inputBytes: encodeKey(colID, indexID, 5, false, 7, true), - expectedFields: []IndexedField{{Value: int64(5)}, {Value: int64(7), Descending: true}}, + inputBytes: encodeKey(colID, indexID, 5, false, 7, true), + expectedFields: []IndexedField{ + {Value: client.NewNormalInt(5)}, + {Value: client.NewNormalInt(7), Descending: true}, + }, }, { name: "last encoded value without matching field description is docID", @@ -305,9 +308,12 @@ func TestDecodeIndexDataStoreKey(t *testing.T) { ID: indexID, Fields: []client.IndexedFieldDescription{{}}, }, - inputBytes: encoding.EncodeStringAscending(append(encodeKey(1, indexID, 5, false), '/'), "docID"), - expectedFields: []IndexedField{{Value: int64(5)}, {Value: "docID"}}, - fieldKinds: []client.FieldKind{client.FieldKind_NILLABLE_INT}, + inputBytes: encoding.EncodeStringAscending(append(encodeKey(1, indexID, 5, false), '/'), "docID"), + expectedFields: []IndexedField{ + {Value: client.NewNormalInt(5)}, + {Value: client.NewNormalString("docID")}, + }, + fieldKinds: []client.FieldKind{client.FieldKind_NILLABLE_INT}, }, } @@ -384,11 +390,6 @@ func TestDecodeIndexDataStoreKey_InvalidKey(t *testing.T) { val: encodeKey(colID, indexID, 5, false, 7, false, 9, false), numFields: 2, }, 
- { - name: "invalid docID value", - val: encoding.EncodeUvarintAscending(append(encodeKey(colID, indexID, 5, false), '/'), 5), - numFields: 1, - }, } indexDesc := client.IndexDescription{ID: indexID, Fields: []client.IndexedFieldDescription{{}}} for _, c := range cases { diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index 158c7cb88d..0d09a2fc9a 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -128,7 +128,7 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo property := &encProperty{Desc: indexedField} field := res.key.Fields[i] - if field.Value == nil { + if field.Value.IsNil() { hasNilField = true } @@ -147,11 +147,14 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo if f.indexDesc.Unique && !hasNilField { f.doc.id = res.value } else { - docID, ok := res.key.Fields[len(res.key.Fields)-1].Value.(string) - if !ok { + lastVal := res.key.Fields[len(res.key.Fields)-1].Value + if str, ok := lastVal.String(); ok { + f.doc.id = []byte(str) + } else if bytes, ok := lastVal.Bytes(); ok { + f.doc.id = bytes + } else { return nil, ExecInfo{}, err } - f.doc.id = []byte(docID) } if f.docFetcher != nil && len(f.docFields) > 0 { diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index 482c15d31a..d1ca5841c3 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -161,7 +161,7 @@ func (i *eqSingleIndexIterator) Close() error { type inIndexIterator struct { indexIterator - inValues []any + inValues []client.NormalValue nextValIndex int ctx context.Context store datastore.DSReaderWriter @@ -274,7 +274,7 @@ func (iter *scanningIndexIterator) Next() (indexIterResult, error) { // checks if the value satisfies the condition type valueMatcher interface { - Match(any) (bool, error) + Match(client.NormalValue) (bool, error) } type intMatcher struct { @@ -282,12 +282,17 @@ type intMatcher struct { evalFunc func(int64, int64) bool } -func (m *intMatcher) Match(value any) (bool, error) { - intVal, ok := value.(int64) - if !ok { - return false, NewErrUnexpectedTypeValue[int64](value) +func (m *intMatcher) Match(value client.NormalValue) (bool, error) { + if intVal, ok := value.Int(); ok { + return m.evalFunc(intVal, m.value), nil + } + if intOptVal, ok := value.NillableInt(); ok { + if !intOptVal.HasValue() { + return false, nil + } + return m.evalFunc(intOptVal.Value(), m.value), nil } - return m.evalFunc(intVal, m.value), nil + return false, NewErrUnexpectedTypeValue[int64](value) } type floatMatcher struct { @@ -295,12 +300,17 @@ type floatMatcher struct { evalFunc func(float64, float64) bool } -func (m *floatMatcher) Match(value any) (bool, error) { - floatVal, ok := value.(float64) - if !ok { - return false, NewErrUnexpectedTypeValue[float64](value) +func (m *floatMatcher) Match(value client.NormalValue) (bool, error) { + if floatVal, ok := value.Float(); ok { + return m.evalFunc(floatVal, m.value), nil + } + if floatOptVal, ok := value.NillableFloat(); ok { + if !floatOptVal.HasValue() { + return false, nil + } + return m.evalFunc(floatOptVal.Value(), m.value), nil } - return m.evalFunc(m.value, floatVal), nil + return false, NewErrUnexpectedTypeValue[float64](value) } type stringMatcher struct { @@ -308,74 +318,36 @@ type stringMatcher struct { evalFunc func(string, string) bool } -func (m *stringMatcher) Match(value any) (bool, error) { - stringVal, ok := value.(string) - if !ok { - return false, NewErrUnexpectedTypeValue[string](value) +func (m 
*stringMatcher) Match(value client.NormalValue) (bool, error) { + if strVal, ok := value.String(); ok { + return m.evalFunc(strVal, m.value), nil + } + if strOptVal, ok := value.NillableString(); ok { + if !strOptVal.HasValue() { + return false, nil + } + return m.evalFunc(strOptVal.Value(), m.value), nil } - return m.evalFunc(m.value, stringVal), nil + return false, NewErrUnexpectedTypeValue[string](value) } -type nilMatcher struct{} +type nilMatcher struct { + matchNil bool +} -func (m *nilMatcher) Match(value any) (bool, error) { - return value == nil, nil +func (m *nilMatcher) Match(value client.NormalValue) (bool, error) { + return value.IsNil() == m.matchNil, nil } // checks if the index value is or is not in the given array type indexInArrayMatcher struct { - inValues []any + inValues []client.NormalValue isIn bool } -func newNinIndexCmp(values []any, kind client.FieldKind, isIn bool) (*indexInArrayMatcher, error) { - normalizeValueFunc := getNormalizeValueFunc(kind) - for i := range values { - normalized, err := normalizeValueFunc(values[i]) - if err != nil { - return nil, err - } - values[i] = normalized - } - return &indexInArrayMatcher{inValues: values, isIn: isIn}, nil -} - -func getNormalizeValueFunc(kind client.FieldKind) func(any) (any, error) { - switch kind { - case client.FieldKind_NILLABLE_INT: - return func(value any) (any, error) { - if v, ok := value.(int64); ok { - return v, nil - } - if v, ok := value.(int32); ok { - return int64(v), nil - } - return nil, ErrInvalidInOperatorValue - } - case client.FieldKind_NILLABLE_FLOAT: - return func(value any) (any, error) { - if v, ok := value.(float64); ok { - return v, nil - } - if v, ok := value.(float32); ok { - return float64(v), nil - } - return nil, ErrInvalidInOperatorValue - } - case client.FieldKind_NILLABLE_STRING: - return func(value any) (any, error) { - if v, ok := value.(string); ok { - return v, nil - } - return nil, ErrInvalidInOperatorValue - } - } - return nil -} - -func (m *indexInArrayMatcher) Match(value any) (bool, error) { +func (m *indexInArrayMatcher) Match(value client.NormalValue) (bool, error) { for _, inVal := range m.inValues { - if inVal == value { + if inVal.Unwrap() == value.Unwrap() { return m.isIn, nil } } @@ -419,17 +391,23 @@ func newLikeIndexCmp(filterValue string, isLike bool, isCaseInsensitive bool) (* return matcher, nil } -func (m *indexLikeMatcher) Match(value any) (bool, error) { - currentVal, ok := value.(string) +func (m *indexLikeMatcher) Match(value client.NormalValue) (bool, error) { + strVal, ok := value.String() if !ok { - return false, NewErrUnexpectedTypeValue[string](currentVal) + strOptVal, ok := value.NillableString() + if !ok { + return false, NewErrUnexpectedTypeValue[string](value) + } + if !strOptVal.HasValue() { + return false, nil + } + strVal = strOptVal.Value() } - if m.isCaseInsensitive { - currentVal = strings.ToLower(currentVal) + strVal = strings.ToLower(strVal) } - return m.doesMatch(currentVal) == m.isLike, nil + return m.doesMatch(strVal) == m.isLike, nil } func (m *indexLikeMatcher) doesMatch(currentVal string) bool { @@ -451,7 +429,7 @@ func (m *indexLikeMatcher) doesMatch(currentVal string) bool { type anyMatcher struct{} -func (m *anyMatcher) Match(any) (bool, error) { return true, nil } +func (m *anyMatcher) Match(client.NormalValue) (bool, error) { return true, nil } // newPrefixIndexIterator creates a new eqPrefixIndexIterator for fetching indexed data. // It can modify the input matchers slice. 
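
The matcher rewrite above replaces `any` type switches with the typed accessor pairs on `client.NormalValue`: each accessor returns the value plus an ok flag, so a matcher probes for the plain and nillable variants in turn instead of type-asserting. A minimal, self-contained sketch of that pattern, where the `Value` interface and `gtIntMatcher` are hypothetical stand-ins for illustration, not DefraDB API:

```go
package main

import "fmt"

// Value is a hypothetical stand-in for client.NormalValue: each accessor
// returns the typed value plus an ok flag, so callers probe for a type
// instead of type-asserting on `any`.
type Value interface {
	Int() (int64, bool)
	String() (string, bool)
	IsNil() bool
}

type intValue struct{ v int64 }

func (i intValue) Int() (int64, bool)     { return i.v, true }
func (i intValue) String() (string, bool) { return "", false }
func (i intValue) IsNil() bool            { return false }

// matcher mirrors the valueMatcher contract from the hunks above.
type matcher interface {
	Match(Value) (bool, error)
}

// gtIntMatcher matches values greater than a bound and rejects values of
// the wrong type, analogous to intMatcher in the diff.
type gtIntMatcher struct{ bound int64 }

func (m gtIntMatcher) Match(v Value) (bool, error) {
	if i, ok := v.Int(); ok {
		return i > m.bound, nil
	}
	return false, fmt.Errorf("expected int value, got %v", v)
}

func main() {
	var m matcher = gtIntMatcher{bound: 5}
	ok, err := m.Match(intValue{v: 7})
	fmt.Println(ok, err) // true <nil>
}
```

Because nillable variants surface through the same probing style, nil handling folds into each matcher (and into the dedicated nilMatcher) rather than being special-cased at every call site.
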
@@ -459,7 +437,7 @@ func (f *IndexFetcher) newPrefixIndexIterator( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*eqPrefixIndexIterator, error) { - keyFieldValues := make([]any, 0, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, 0, len(fieldConditions)) for i := range fieldConditions { if fieldConditions[i].op != opEq { // prefix can be created only for subsequent _eq conditions @@ -496,14 +474,12 @@ func (f *IndexFetcher) newInIndexIterator( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*inIndexIterator, error) { - inArr, ok := fieldConditions[0].val.([]any) - if !ok { + if !fieldConditions[0].val.IsArray() { return nil, ErrInvalidInOperatorValue } - inValues := make([]any, 0, len(inArr)) - for _, v := range inArr { - fieldVal := client.NewFieldValue(client.NONE_CRDT, v) - inValues = append(inValues, fieldVal.Value()) + inValues, err := client.ToArrayOfNormalValues(fieldConditions[0].val) + if err != nil { + return nil, err } // iterators for _in filter already iterate over keys with first field value @@ -514,7 +490,7 @@ func (f *IndexFetcher) newInIndexIterator( var iter indexIterator if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { - keyFieldValues := make([]any, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { keyFieldValues[i] = fieldConditions[i].val } @@ -547,7 +523,7 @@ func (f *IndexFetcher) newIndexDataStoreKey() core.IndexDataStoreKey { return key } -func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []any) core.IndexDataStoreKey { +func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []client.NormalValue) core.IndexDataStoreKey { fields := make([]core.IndexedField, len(values)) for i := range values { fields[i].Value = values[i] @@ -557,7 +533,10 @@ func (f *IndexFetcher) newIndexDataStoreKeyWithValues(values []any) core.IndexDa } func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { - fieldConditions := f.determineFieldFilterConditions() + fieldConditions, err := f.determineFieldFilterConditions() + if err != nil { + return nil, err + } matchers, err := createValueMatchers(fieldConditions) if err != nil { @@ -567,7 +546,7 @@ func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { switch fieldConditions[0].op { case opEq: if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { - keyFieldValues := make([]any, len(fieldConditions)) + keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { keyFieldValues[i] = fieldConditions[i].val } @@ -600,49 +579,44 @@ func createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) { return &anyMatcher{}, nil } - if client.IsNillableKind(condition.kind) && condition.val == nil { - return &nilMatcher{}, nil + if condition.val.IsNil() { + return &nilMatcher{matchNil: condition.op == opEq}, nil } switch condition.op { case opEq, opGt, opGe, opLt, opLe, opNe: - switch condition.kind { - case client.FieldKind_NILLABLE_INT: - var intVal int64 - switch v := condition.val.(type) { - case int64: - intVal = v - case int32: - intVal = int64(v) - case int: - intVal = int64(v) - default: - return nil, NewErrUnexpectedTypeValue[int64](condition.val) - } - return &intMatcher{value: intVal, evalFunc: getCompareValsFunc[int64](condition.op)}, nil - case client.FieldKind_NILLABLE_FLOAT: - floatVal, ok := condition.val.(float64) - if !ok { - return nil, NewErrUnexpectedTypeValue[float64](condition.val) - } - return 
&floatMatcher{value: floatVal, evalFunc: getCompareValsFunc[float64](condition.op)}, nil - case client.FieldKind_DocID, client.FieldKind_NILLABLE_STRING: - strVal, ok := condition.val.(string) - if !ok { - return nil, NewErrUnexpectedTypeValue[string](condition.val) - } - return &stringMatcher{value: strVal, evalFunc: getCompareValsFunc[string](condition.op)}, nil + if v, ok := condition.val.Int(); ok { + return &intMatcher{value: v, evalFunc: getCompareValsFunc[int64](condition.op)}, nil + } + if v, ok := condition.val.NillableInt(); ok { + return &intMatcher{value: v.Value(), evalFunc: getCompareValsFunc[int64](condition.op)}, nil + } + if v, ok := condition.val.Float(); ok { + return &floatMatcher{value: v, evalFunc: getCompareValsFunc[float64](condition.op)}, nil + } + if v, ok := condition.val.NillableFloat(); ok { + return &floatMatcher{value: v.Value(), evalFunc: getCompareValsFunc[float64](condition.op)}, nil + } + if v, ok := condition.val.String(); ok { + return &stringMatcher{value: v, evalFunc: getCompareValsFunc[string](condition.op)}, nil + } + if v, ok := condition.val.NillableString(); ok { + return &stringMatcher{value: v.Value(), evalFunc: getCompareValsFunc[string](condition.op)}, nil } case opIn, opNin: - inArr, ok := condition.val.([]any) - if !ok { - return nil, ErrInvalidInOperatorValue + inVals, err := client.ToArrayOfNormalValues(condition.val) + if err != nil { + return nil, err } - return newNinIndexCmp(inArr, condition.kind, condition.op == opIn) + return &indexInArrayMatcher{inValues: inVals, isIn: condition.op == opIn}, nil case opLike, opNlike, opILike, opNILike: - strVal, ok := condition.val.(string) + strVal, ok := condition.val.String() if !ok { - return nil, NewErrUnexpectedTypeValue[string](condition.val) + strOptVal, ok := condition.val.NillableString() + if !ok { + return nil, NewErrUnexpectedTypeValue[string](condition.val) + } + strVal = strOptVal.Value() } isLike := condition.op == opLike || condition.op == opILike isCaseInsensitive := condition.op == opILike || condition.op == opNILike @@ -668,14 +642,14 @@ func createValueMatchers(conditions []fieldFilterCond) ([]valueMatcher, error) { type fieldFilterCond struct { op string - val any + val client.NormalValue kind client.FieldKind } // determineFieldFilterConditions determines the conditions and their corresponding operation // for each indexed field. // It returns a slice of fieldFilterCond, where each element corresponds to a field in the index. 
-func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { +func (f *IndexFetcher) determineFieldFilterConditions() ([]fieldFilterCond, error) { result := make([]fieldFilterCond, 0, len(f.indexedFields)) for i := range f.indexedFields { fieldInd := f.mapping.FirstIndexOfName(f.indexedFields[i].Name) @@ -692,9 +666,19 @@ func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { condMap := indexFilterCond.(map[connor.FilterKey]any) for key, filterVal := range condMap { opKey := key.(*mapper.Operator) + var normalVal client.NormalValue + var err error + if filterVal == nil { + normalVal, err = client.NewNormalNil(f.indexedFields[i].Kind) + } else { + normalVal, err = client.NewNormalValue(filterVal) + } + if err != nil { + return nil, err + } result = append(result, fieldFilterCond{ op: opKey.Operation, - val: filterVal, + val: normalVal, kind: f.indexedFields[i].Kind, }) break @@ -702,10 +686,14 @@ func (f *IndexFetcher) determineFieldFilterConditions() []fieldFilterCond { break } if !found { - result = append(result, fieldFilterCond{op: opAny}) + result = append(result, fieldFilterCond{ + op: opAny, + val: client.NormalVoid{}, + kind: f.indexedFields[i].Kind, + }) } } - return result + return result, nil } // isUniqueFetchByFullKey checks if the only index key can be fetched by the full index key. @@ -719,11 +707,11 @@ func isUniqueFetchByFullKey(indexDesc *client.IndexDescription, conditions []fie res := indexDesc.Unique && len(conditions) == len(indexDesc.Fields) // first condition is not required to be _eq, but if is, val must be not nil - res = res && (conditions[0].op != opEq || conditions[0].val != nil) + res = res && (conditions[0].op != opEq || !conditions[0].val.IsNil()) // for the rest it must be _eq and val must be not nil for i := 1; i < len(conditions); i++ { - res = res && (conditions[i].op == opEq && conditions[i].val != nil) + res = res && (conditions[i].op == opEq && !conditions[i].val.IsNil()) } return res } diff --git a/db/index.go b/db/index.go index 5de38aac44..c041d3945c 100644 --- a/db/index.go +++ b/db/index.go @@ -116,18 +116,22 @@ type collectionBaseIndex struct { fieldsDescs []client.SchemaFieldDescription } -func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]*client.FieldValue, error) { - result := make([]*client.FieldValue, 0, len(index.fieldsDescs)) +func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]client.NormalValue, error) { + result := make([]client.NormalValue, 0, len(index.fieldsDescs)) for iter := range index.fieldsDescs { fieldVal, err := doc.TryGetValue(index.fieldsDescs[iter].Name) if err != nil { return nil, err } if fieldVal == nil || fieldVal.Value() == nil { - result = append(result, client.NewFieldValue(client.NONE_CRDT, nil)) + normalNil, err := client.NewNormalNil(index.fieldsDescs[iter].Kind) + if err != nil { + return nil, err + } + result = append(result, normalNil) continue } - result = append(result, fieldVal) + result = append(result, fieldVal.NormalValue()) } return result, nil } @@ -142,7 +146,7 @@ func (index *collectionBaseIndex) getDocumentsIndexKey( fields := make([]core.IndexedField, len(index.fieldsDescs)) for i := range index.fieldsDescs { - fields[i].Value = fieldValues[i].Value() + fields[i].Value = fieldValues[i] fields[i].Descending = index.desc.Fields[i].Descending } return core.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil @@ -211,7 +215,7 @@ func (index *collectionSimpleIndex) getDocumentsIndexKey( return 
core.IndexDataStoreKey{}, err } - key.Fields = append(key.Fields, core.IndexedField{Value: doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, nil } @@ -268,7 +272,7 @@ func (index *collectionSimpleIndex) deleteDocIndex( // hasIndexKeyNilField returns true if the index key has a field with nil value func hasIndexKeyNilField(key *core.IndexDataStoreKey) bool { for i := range key.Fields { - if key.Fields[i].Value == nil { + if key.Fields[i].Value.IsNil() { return true } } @@ -334,7 +338,7 @@ func (index *collectionUniqueIndex) getDocumentsIndexRecord( return core.IndexDataStoreKey{}, nil, err } if hasIndexKeyNilField(&key) { - key.Fields = append(key.Fields, core.IndexedField{Value: doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, []byte{}, nil } else { return key, []byte(doc.ID().String()), nil diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index d10ad8eb5b..aeca3341b6 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -166,15 +166,25 @@ indexLoop: hasNilValue := false for i, fieldName := range b.fieldsNames { fieldValue, err := b.doc.GetValue(fieldName) - var val any + var val client.NormalValue if err != nil { if !errors.Is(err, client.ErrFieldNotExist) { require.NoError(b.f.t, err) } - } else if fieldValue != nil { - val = fieldValue.Value() } - if val == nil { + if fieldValue != nil { + val = fieldValue.NormalValue() + } else { + kind := client.FieldKind_NILLABLE_STRING + if fieldName == usersAgeFieldName { + kind = client.FieldKind_NILLABLE_INT + } else if fieldName == usersWeightFieldName { + kind = client.FieldKind_NILLABLE_FLOAT + } + val, err = client.NewNormalNil(kind) + require.NoError(b.f.t, err) + } + if val.IsNil() { hasNilValue = true } descending := false @@ -185,7 +195,7 @@ indexLoop: } if !b.isUnique || hasNilValue { - key.Fields = append(key.Fields, core.IndexedField{Value: b.doc.ID().String()}) + key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(b.doc.ID().String())}) } } @@ -1196,7 +1206,7 @@ func TestComposite_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { assert.Len(t, data, 0) } -func TestComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { +func TestUniqueComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { testCases := []struct { Name string Doc string @@ -1238,34 +1248,36 @@ func TestComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T) { } for _, tc := range testCases { - f := newIndexTestFixture(t) - defer f.db.Close() + t.Run(tc.Name, func(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() - indexDesc := makeUnique(addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName)) - _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) - require.NoError(f.t, err) - f.commitTxn() + indexDesc := makeUnique(addFieldToIndex(getUsersIndexDescOnName(), usersAgeFieldName)) + _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + f.commitTxn() - doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Schema()) - require.NoError(f.t, err) + doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Schema()) + require.NoError(f.t, err) - f.saveDocToCollection(doc, f.users) + f.saveDocToCollection(doc, f.users) - oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). 
- Doc(doc).Unique().Build() + oldKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). + Doc(doc).Unique().Build() - require.NoError(t, doc.SetWithJSON([]byte(tc.Update))) + require.NoError(t, doc.SetWithJSON([]byte(tc.Update))) - newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). - Doc(doc).Unique().Build() + newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). + Doc(doc).Unique().Build() - require.NoError(t, f.users.Update(f.ctx, doc), tc.Name) - f.commitTxn() + require.NoError(t, f.users.Update(f.ctx, doc), tc.Name) + f.commitTxn() - _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) - require.Error(t, err, oldKey.ToString(), oldKey.ToDS(), tc.Name) - _, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS()) - require.NoError(t, err, newKey.ToString(), newKey.ToDS(), tc.Name) + _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) + require.Error(t, err, oldKey.ToString(), oldKey.ToDS(), tc.Name) + _, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS()) + require.NoError(t, err, newKey.ToString(), newKey.ToDS(), tc.Name) + }) } } diff --git a/encoding/field_value.go b/encoding/field_value.go index 9c8cd5589f..f62375a461 100644 --- a/encoding/field_value.go +++ b/encoding/field_value.go @@ -11,30 +11,20 @@ package encoding import ( - "golang.org/x/exp/constraints" - "github.com/sourcenetwork/defradb/client" ) -func encodeIntFieldValue[T constraints.Integer](b []byte, val T, descending bool) []byte { - if descending { - return EncodeVarintDescending(b, int64(val)) - } - return EncodeVarintAscending(b, int64(val)) -} - // EncodeFieldValue encodes a FieldValue into a byte slice. // The encoded value is appended to the supplied buffer and the resulting buffer is returned. 
-func EncodeFieldValue(b []byte, val any, descending bool) []byte { - if val == nil { +func EncodeFieldValue(b []byte, val client.NormalValue, descending bool) []byte { + if val.IsNil() { if descending { return EncodeNullDescending(b) } else { return EncodeNullAscending(b) } } - switch v := val.(type) { - case bool: + if v, ok := val.Bool(); ok { var boolInt int64 = 0 if v { boolInt = 1 @@ -43,35 +33,66 @@ func EncodeFieldValue(b []byte, val any, descending bool) []byte { return EncodeVarintDescending(b, boolInt) } return EncodeVarintAscending(b, boolInt) - case int: - return encodeIntFieldValue(b, v, descending) - case int32: - return encodeIntFieldValue(b, v, descending) - case int64: - return encodeIntFieldValue(b, v, descending) - case float64: + } + if v, ok := val.NillableBool(); ok { + var boolInt int64 = 0 + if v.Value() { + boolInt = 1 + } + if descending { + return EncodeVarintDescending(b, boolInt) + } + return EncodeVarintAscending(b, boolInt) + } + if v, ok := val.Int(); ok { + if descending { + return EncodeVarintDescending(b, v) + } + return EncodeVarintAscending(b, v) + } + if v, ok := val.NillableInt(); ok { + if descending { + return EncodeVarintDescending(b, v.Value()) + } + return EncodeVarintAscending(b, v.Value()) + } + if v, ok := val.Float(); ok { if descending { return EncodeFloatDescending(b, v) } return EncodeFloatAscending(b, v) - case string: + } + if v, ok := val.NillableFloat(); ok { + if descending { + return EncodeFloatDescending(b, v.Value()) + } + return EncodeFloatAscending(b, v.Value()) + } + if v, ok := val.String(); ok { if descending { return EncodeStringDescending(b, v) } return EncodeStringAscending(b, v) } + if v, ok := val.NillableString(); ok { + if descending { + return EncodeStringDescending(b, v.Value()) + } + return EncodeStringAscending(b, v.Value()) + } return b } -// DecodeFieldValue decodes a FieldValue from a byte slice. +// DecodeFieldValue decodes a field value from a byte slice. // The decoded value is returned along with the remaining byte slice. 
-func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { +func DecodeFieldValue(b []byte, descending bool, kind client.FieldKind) ([]byte, client.NormalValue, error) { typ := PeekType(b) switch typ { case Null: b, _ = DecodeIfNull(b) - return b, nil, nil + nilVal, err := client.NewNormalNil(kind) + return b, nilVal, err case Int: var v int64 var err error @@ -81,9 +102,9 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeVarintAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_INT, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalInt(v), nil case Float: var v float64 var err error @@ -93,9 +114,9 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeFloatAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_FLOAT, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalFloat(v), nil case Bytes, BytesDesc: var v []byte var err error @@ -105,10 +126,10 @@ func DecodeFieldValue(b []byte, descending bool) ([]byte, any, error) { b, v, err = DecodeBytesAscending(b) } if err != nil { - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_STRING, err) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind, err) } - return b, v, nil + return b, client.NewNormalString(v), nil } - return nil, nil, NewErrCanNotDecodeFieldValue(b, client.FieldKind_NILLABLE_STRING) + return nil, nil, NewErrCanNotDecodeFieldValue(b, kind) } diff --git a/encoding/field_value_test.go b/encoding/field_value_test.go index a08446cb1f..69a8096f85 100644 --- a/encoding/field_value_test.go +++ b/encoding/field_value_test.go @@ -15,57 +15,63 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/sourcenetwork/defradb/client" ) func TestEncodeDecodeFieldValue(t *testing.T) { + normalNil, err := client.NewNormalNil(client.FieldKind_NILLABLE_INT) + require.NoError(t, err) + tests := []struct { name string - inputVal any + inputVal client.NormalValue expectedBytes []byte expectedBytesDesc []byte expectedDecodedVal any }{ { name: "nil", - inputVal: nil, + inputVal: normalNil, expectedBytes: EncodeNullAscending(nil), expectedBytesDesc: EncodeNullDescending(nil), - expectedDecodedVal: nil, + expectedDecodedVal: normalNil, }, { name: "bool true", - inputVal: true, + inputVal: client.NewNormalBool(true), expectedBytes: EncodeVarintAscending(nil, 1), expectedBytesDesc: EncodeVarintDescending(nil, 1), - expectedDecodedVal: int64(1), + expectedDecodedVal: client.NewNormalInt(1), }, { name: "bool false", - inputVal: false, + inputVal: client.NewNormalBool(false), expectedBytes: EncodeVarintAscending(nil, 0), expectedBytesDesc: EncodeVarintDescending(nil, 0), - expectedDecodedVal: int64(0), + expectedDecodedVal: client.NewNormalInt(0), }, { name: "int", - inputVal: int64(55), + inputVal: client.NewNormalInt(55), expectedBytes: EncodeVarintAscending(nil, 55), expectedBytesDesc: EncodeVarintDescending(nil, 55), - expectedDecodedVal: int64(55), + expectedDecodedVal: client.NewNormalInt(55), }, { name: "float", - inputVal: 0.2, + inputVal: client.NewNormalFloat(0.2), expectedBytes: EncodeFloatAscending(nil, 0.2), expectedBytesDesc: EncodeFloatDescending(nil, 0.2), - expectedDecodedVal: 0.2, + expectedDecodedVal: 
client.NewNormalFloat(0.2), }, { name: "string", - inputVal: "str", + inputVal: client.NewNormalString("str"), expectedBytes: EncodeBytesAscending(nil, []byte("str")), expectedBytesDesc: EncodeBytesDescending(nil, []byte("str")), - expectedDecodedVal: []byte("str"), + expectedDecodedVal: client.NewNormalString("str"), }, } @@ -85,7 +91,7 @@ func TestEncodeDecodeFieldValue(t *testing.T) { t.Errorf("EncodeFieldValue() = %v, want %v", encoded, expectedBytes) } - _, decodedFieldVal, err := DecodeFieldValue(encoded, descending) + _, decodedFieldVal, err := DecodeFieldValue(encoded, descending, client.FieldKind_NILLABLE_INT) assert.NoError(t, err) if !reflect.DeepEqual(decodedFieldVal, tt.expectedDecodedVal) { t.Errorf("DecodeFieldValue() = %v, want %v", decodedFieldVal, tt.expectedDecodedVal) @@ -134,7 +140,7 @@ func TestDecodeInvalidFieldValue(t *testing.T) { if descending { inputBytes = tt.inputBytesDesc } - _, _, err := DecodeFieldValue(inputBytes, descending) + _, _, err := DecodeFieldValue(inputBytes, descending, client.FieldKind_NILLABLE_INT) assert.ErrorIs(t, err, ErrCanNotDecodeFieldValue) }) } diff --git a/tests/integration/index/query_with_index_only_field_order_test.go b/tests/integration/index/query_with_index_only_field_order_test.go index ae46213533..13a2f7cb77 100644 --- a/tests/integration/index/query_with_index_only_field_order_test.go +++ b/tests/integration/index/query_with_index_only_field_order_test.go @@ -108,7 +108,7 @@ func TestQueryWithIndex_IfFloatFieldInDescOrder_ShouldFetchInRevertedOrder(t *te testUtils.Request{ Request: ` query { - User(filter: {iq: {_gt: 1}}) { + User(filter: {iq: {_lt: 1}}) { name iq } diff --git a/tests/integration/index/query_with_index_only_filter_test.go b/tests/integration/index/query_with_index_only_filter_test.go index 0c2c337398..1baf7248ac 100644 --- a/tests/integration/index/query_with_index_only_filter_test.go +++ b/tests/integration/index/query_with_index_only_filter_test.go @@ -351,6 +351,59 @@ func TestQueryWithIndex_WithInFilter_ShouldFetch(t *testing.T) { testUtils.ExecuteTestCase(t, test) } +func TestQueryWithIndex_WithInFilterOnFloat_ShouldFetch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test index filtering with _in filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + rate: Float @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "rate": 20.0 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "rate": 20.1 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "rate": 20.2 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "rate": 20.3 + }`, + }, + testUtils.Request{ + Request: ` + query { + User(filter: {rate: {_in: [20, 20.2]}}) { + name + } + }`, + Results: []map[string]any{ + {"name": "Islam"}, + {"name": "Fred"}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + func TestQueryWithIndex_IfSeveralDocsWithInFilter_ShouldFetchAll(t *testing.T) { req := `query { User(filter: {name: {_in: ["Islam"]}}) { diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go index 08f1b1b927..23563335d4 100644 --- a/tests/integration/index/query_with_unique_index_only_filter_test.go +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -558,6 +558,14 @@ func TestQueryWithUniqueIndex_WithEqualFilterOnNilValue_ShouldFetch(t *testing.T "name": "Alice" }`, }, + testUtils.CreateDoc{ + CollectionID: 0, + 
Doc: ` + { + "name": "Bob", + "age": 0 + }`, + }, testUtils.Request{ Request: ` query { @@ -575,6 +583,109 @@ func TestQueryWithUniqueIndex_WithEqualFilterOnNilValue_ShouldFetch(t *testing.T testUtils.ExecuteTestCase(t, test) } +func TestQueryWithUniqueIndex_WithEqualFilterOnZero_ShouldNotFetchNil(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test index filtering with _eq filter on nil value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Alice" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Bob", + "age": 0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Kate", + "age": 33 + }`, + }, + testUtils.Request{ + Request: ` + query { + User(filter: {age: {_eq: 0}}) { + name + } + }`, + Results: []map[string]any{ + {"name": "Bob"}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotEqualFilterOnNilValue_ShouldFetch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test index filtering with _eq filter on nil value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Alice" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Kate", + "age": 0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Bob", + "age": 23 + }`, + }, + testUtils.Request{ + Request: ` + query { + User(filter: {age: {_ne: null}}) { + name + } + }`, + Results: []map[string]any{ + {"name": "Kate"}, + {"name": "Bob"}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + func TestQueryWithUniqueIndex_WithMultipleNilValuesAndEqualFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _eq filter on nil value", diff --git a/tests/integration/query/simple/with_filter/with_in_test.go b/tests/integration/query/simple/with_filter/with_in_test.go index a43f19c37b..7e2aa6df82 100644 --- a/tests/integration/query/simple/with_filter/with_in_test.go +++ b/tests/integration/query/simple/with_filter/with_in_test.go @@ -60,6 +60,47 @@ func TestQuerySimpleWithIntInFilter(t *testing.T) { executeTestCase(t, test) } +func TestQuerySimpleWithIntInFilterOnFloat(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with _in filter on float", + Request: `query { + Users(filter: {HeightM: {_in: [21, 21.2]}}) { + Name + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "HeightM": 21.0 + }`, + `{ + "Name": "Bob", + "HeightM": 21.1 + }`, + `{ + "Name": "Carlo", + "HeightM": 21.2 + }`, + `{ + "Name": "Alice", + "HeightM": 21.3 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "John", + }, + { + "Name": "Carlo", + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithIntInFilterWithNullValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with special filter (or)", From 34ff6480a44b804b3fe8cf21b90b8afffb21ecc8 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 28 Mar 2024 11:57:29 -0400 Subject: [PATCH 15/49] feat: Force explicit primary decl. in SDL for one-ones (#2462) ## Relevant issue(s) Resolves #2461 ## Description Forces the explicit declaration of the primary side of one-one relation fields in SDL. 
PatchSchema already forces this and needed no change. --- client/errors.go | 9 +++ db/collection.go | 2 +- db/errors.go | 8 --- request/graphql/schema/descriptions_test.go | 4 +- request/graphql/schema/relations.go | 8 +-- tests/gen/gen_auto_test.go | 8 +-- .../backup/one_to_one/export_test.go | 8 +-- .../backup/one_to_one/import_test.go | 4 +- tests/integration/explain/fixture.go | 4 +- .../index/query_with_relation_filter_test.go | 4 +- .../field_kinds/one_to_one_to_one/utils.go | 2 +- .../field_kinds/one_to_one_to_one/utils.go | 2 +- tests/integration/schema/one_one_test.go | 61 +++++++++++++++++++ tests/integration/schema/relations_test.go | 2 +- .../view/one_to_one/with_transform_test.go | 2 +- tests/predefined/gen_predefined_test.go | 2 +- 16 files changed, 93 insertions(+), 37 deletions(-) create mode 100644 tests/integration/schema/one_one_test.go diff --git a/client/errors.go b/client/errors.go index 71a111d431..7e18e9566c 100644 --- a/client/errors.go +++ b/client/errors.go @@ -34,6 +34,7 @@ const ( errCanNotNormalizeValue string = "can not normalize value" errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" + errPrimarySideNotDefined string = "primary side of relation not defined" ) // Errors returnable from this package. @@ -57,6 +58,7 @@ var ( ErrCanNotNormalizeValue = errors.New(errCanNotNormalizeValue) ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) + ErrPrimarySideNotDefined = errors.New(errPrimarySideNotDefined) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. 
@@ -178,3 +180,10 @@ func NewErrCRDTKindMismatch(cType, kind string) error { func NewErrInvalidJSONPaylaod(payload string) error { return errors.New(errInvalidJSONPayload, errors.NewKV("Payload", payload)) } + +func NewErrPrimarySideNotDefined(relationName string) error { + return errors.New( + errPrimarySideNotDefined, + errors.NewKV("RelationName", relationName), + ) +} diff --git a/db/collection.go b/db/collection.go index 49bdf01e71..2ad2cf2ca5 100644 --- a/db/collection.go +++ b/db/collection.go @@ -484,7 +484,7 @@ func validateUpdateSchemaFields( } if !(proposedField.IsPrimaryRelation || relatedField.IsPrimaryRelation) { - return false, NewErrPrimarySideNotDefined(proposedField.RelationName) + return false, client.NewErrPrimarySideNotDefined(proposedField.RelationName) } if proposedField.IsPrimaryRelation && relatedField.IsPrimaryRelation { diff --git a/db/errors.go b/db/errors.go index c32da44671..b854ad2d3d 100644 --- a/db/errors.go +++ b/db/errors.go @@ -31,7 +31,6 @@ const ( errRelationalFieldInvalidRelationType string = "invalid RelationType" errRelationalFieldMissingIDField string = "missing id field for relation object field" errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideNotDefined string = "primary side of relation not defined" errPrimarySideOnMany string = "cannot set the many side of a relation as primary" errBothSidesPrimary string = "both sides of a relation cannot be primary" errRelatedFieldKindMismatch string = "invalid Kind of the related field" @@ -273,13 +272,6 @@ func NewErrRelationalFieldMissingRelationName(name string) error { ) } -func NewErrPrimarySideNotDefined(relationName string) error { - return errors.New( - errPrimarySideNotDefined, - errors.NewKV("RelationName", relationName), - ) -} - func NewErrPrimarySideOnMany(name string) error { return errors.New( errPrimarySideOnMany, diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 1540037a8d..354109965c 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -157,7 +157,7 @@ func TestSingleSimpleType(t *testing.T) { type Author { name: String age: Int - published: Book + published: Book @primary } `, targetDescs: []client.CollectionDefinition{ @@ -330,7 +330,7 @@ func TestSingleSimpleType(t *testing.T) { type Author { name: String age: Int - published: Book @relation(name:"book_authors") + published: Book @relation(name:"book_authors") @primary } `, targetDescs: []client.CollectionDefinition{ diff --git a/request/graphql/schema/relations.go b/request/graphql/schema/relations.go index e6d2af8b09..6d548ceebe 100644 --- a/request/graphql/schema/relations.go +++ b/request/graphql/schema/relations.go @@ -124,13 +124,7 @@ func (r *Relation) finalize() error { if aBit.isSet(relation_Type_Primary) { return ErrMultipleRelationPrimaries } else if !xBit.isSet(relation_Type_Primary) { - // neither type has primary set, auto add to - // lexicographically first one by schema type name - if strings.Compare(r.schemaTypes[0], r.schemaTypes[1]) < 1 { - r.types[1] = r.types[1] | relation_Type_Primary - } else { - r.types[0] = r.types[0] | relation_Type_Primary - } + return client.NewErrPrimarySideNotDefined(r.name) } } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 52ee7eb58d..54212509e0 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -338,7 +338,7 @@ func TestAutoGenerateFromSchema_RelationOneToOne(t *testing.T) { } type 
Device { - owner: User + owner: User @primary model: String }` @@ -792,7 +792,7 @@ func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { type Device { model: String - owner: User + owner: User @primary }`, options: []Option{WithTypeDemand("User", 10), WithTypeDemand("Device", 30)}, }, @@ -801,12 +801,12 @@ func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { type User { name: String device: Device - orders: Order + orders: Order @primary } type Device { model: String - owner: User + owner: User @primary } type Order { diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index 4ae32cbebc..b52e0bb02f 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -78,8 +78,8 @@ func TestBackupExport_DoubleReletionship_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, @@ -122,8 +122,8 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 5405dd4225..d7ca39ea55 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -205,8 +205,8 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { } type Book { name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") + author: User @relation(name: "written_books") @primary + favourite: User @relation(name: "favourite_books") @primary } `, }, diff --git a/tests/integration/explain/fixture.go b/tests/integration/explain/fixture.go index c531d95a84..83db5ff926 100644 --- a/tests/integration/explain/fixture.go +++ b/tests/integration/explain/fixture.go @@ -38,14 +38,14 @@ var SchemaForExplainTests = testUtils.SchemaUpdate{ verified: Boolean books: [Book] articles: [Article] - contact: AuthorContact + contact: AuthorContact @primary } type AuthorContact { cell: String email: String author: Author - address: ContactAddress + address: ContactAddress @primary } type ContactAddress { diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index e3ae71429e..db2f351ae7 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -167,7 +167,7 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh } type Address { - user: User + user: User @primary city: String @index }`, }, @@ -348,7 +348,7 @@ func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter } type Address { - user: User + user: User @primary city: String @index }`, }, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go index 9fce31fdb2..896fb1c5eb 100644 --- 
a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go @@ -29,7 +29,7 @@ func execute(t *testing.T, test testUtils.TestCase) { name: String rating: Float author: Author - publisher: Publisher + publisher: Publisher @primary } type Author { diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go index 89f0e497f4..131a7194fe 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go @@ -29,7 +29,7 @@ func execute(t *testing.T, test testUtils.TestCase) { name: String rating: Float author: Author - publisher: Publisher + publisher: Publisher @primary } type Author { diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go new file mode 100644 index 0000000000..e14792f75e --- /dev/null +++ b/tests/integration/schema/one_one_test.go @@ -0,0 +1,61 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package schema + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaOneOne_NoPrimary_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + dog: Dog + } + type Dog { + name: String + owner: User + } + `, + ExpectedError: "primary side of relation not defined. 
RelationName: dog_user", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaOneOne_TwoPrimaries_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + dog: Dog @primary + } + type Dog { + name: String + owner: User @primary + } + `, + ExpectedError: "relation can only have a single field set as primary", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/relations_test.go b/tests/integration/schema/relations_test.go index d1b420afb6..ade67c689a 100644 --- a/tests/integration/schema/relations_test.go +++ b/tests/integration/schema/relations_test.go @@ -23,7 +23,7 @@ func TestSchemaRelationOneToOne(t *testing.T) { Schema: ` type Dog { name: String - user: User + user: User @primary } type User { dog: Dog diff --git a/tests/integration/view/one_to_one/with_transform_test.go b/tests/integration/view/one_to_one/with_transform_test.go index cc638596e0..e6da410ee1 100644 --- a/tests/integration/view/one_to_one/with_transform_test.go +++ b/tests/integration/view/one_to_one/with_transform_test.go @@ -32,7 +32,7 @@ func TestView_OneToOneWithTransformOnOuter(t *testing.T) { } type Book { name: String - author: Author + author: Author @primary } `, }, diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index c5e863a51c..94b261059e 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -80,7 +80,7 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { } type Device { model: String - owner: User + owner: User @primary }` docs, err := CreateFromSDL(schema, DocsList{ From a46c1fa9038026d54dafea20afc02b10879673be Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Wed, 3 Apr 2024 01:40:30 -0400 Subject: [PATCH 16/49] feat: Add Access Control Policy (#2338) ## Relevant issue(s) Part of Epic #1738 Resolves #2019 Resolves #2020 Resolves #2228 ## Description - [Rendered ACP Doc](https://github.com/shahzadlone/defradb/blob/lone/acp-sourcehub-module/acp/README.md) - Introduces an ACP Interface that needs to be satisfied for any type of access control module. - This PR implements a local embedded access control system for DefraDB by implementing the ACP interface. - There should be no remote calls (everything introduced here is local). - There should be no blocking of any kind, no async stuff was introduced. - Documents now have the following three states, from a requester's POV: - Public document (always accessible). - Accessible document (accessible if requested by the owner) - Inaccessible/private document (can't access if requested by non-owner or non-identity request) - Ability to add permissioned schema. - Added permissioned schema examples. - Ability to add policy. - Added policy examples (JSON and YAML). - Ability to specify simple identity addresses on requests. - Mocks and Documents Generated (old and new). 
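From a requester's point of view, the three document states above collapse into a single access decision. The following Go sketch is illustrative only (the helper name, signature, and plain-string identities are hypothetical; the real checks live in the new `db/permission` package and consult the ACP module), but it captures the semantics described above:

```go
package main

import "fmt"

// canAccess condenses the three document states into one decision.
// Hypothetical helper: DefraDB's actual checks are more involved and
// consult the ACP module rather than comparing identity strings.
func canAccess(registeredWithACP bool, ownerIdentity, requesterIdentity string) bool {
	if !registeredWithACP {
		// Public document: always accessible.
		return true
	}
	if requesterIdentity == "" {
		// Private document and no identity on the request.
		return false
	}
	// Accessible document: only the owner can reach it here, since
	// sharing with other actors is not exercised by this PR.
	return requesterIdentity == ownerIdentity
}

func main() {
	fmt.Println(canAccess(false, "alice", ""))     // true: public
	fmt.Println(canAccess(true, "alice", ""))      // false: no identity
	fmt.Println(canAccess(true, "alice", "bob"))   // false: non-owner
	fmt.Println(canAccess(true, "alice", "alice")) // true: owner
}
```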
### Demo ```go AddPolicy{ Creator: "source1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", Policy: ` description: a test policy which marks a collection in a database as a resource actor: name: actor resources: users: permissions: read: expr: owner + reader write: expr: owner relations: owner: types: - actor reader: types: - actor admin: manages: - reader types: - actor `, ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", } SchemaUpdate{ Schema: ` type Users @policy(id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", resource: "users") { name: String age: Int } `, } CreateDoc{ Identity: "source1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", Doc: `{"name": "John", "age": 27 }`, } Request{ Identity: "source1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", Request: ` query { Users { _docID name age } } `, Results: []map[string]any{ { "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", "name": "John", "age": int64(27), }, }, } // The Following Requests Don't Have Access Request{ Request: ` query { Users { _docID name age } } `, Results: []map[string]any{}, } Request{ Identity: "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll", Request: ` query { Users { _docID name age } } `, Results: []map[string]any{}, } ``` ### Features - [x] In-memory and file-based ACP module. - [x] Registering of a document with ACP Module on creation. - [x] Add policy command HTTP & CLI. - [x] Detect Policy Marshal Format on adding. - [x] Add permissioned schema. - [x] Reject schema on validation failure. - [x] Accept schema on validation success. - [x] Permissioned Fetcher - [x] Specify Identity on doc create. - [x] Specify Identity on doc delete. - [x] Specify Identity on doc update. - [x] Specify Identity on request. #### Things That Are In Scope Of This PR: - All previous features should behave as they were. - There should be no performance costs/hits to any previous functionality. - Access Control would only be registered for a document on a collection if: - Have ACP Module initialized / available programmatically - The creation request had an Identity attached. - The collection has a permission on it (i.e. collection is permissioned). - Access-Controlled Read/Write, after Creation and Registering: - If there is no ACP module, have access to all documents (as if ACP is turned off). - If there is no Identity, one can only operate on public documents (unregistered documents with ACP). - If there is a Permissioned Collection, an Identity, and an ACP Module, then operate on access-controlled documents. - Cosmos Identity Addresses. - Adding of a policy (CLI & HTTP), Tests: ./tests/integration/acp/add_policy - Validation of Linked Policy Resource DPI on Schema Add: - Accepting of a permissioned schema on Valid DPI Resource - Rejecting of a permissioned schema on Invalid DPI Resource - Tests for both here: ./tests/integration/acp/schema/add_dpi #### Things That Are Out Of Scope Of This PR: - Full-Fledged Identity with Authentication - Using ACP with any other feature is not supported: - P2P - Secondary Indexes - Type Joins - Aggregates - Backup & Recover - Views - Lens #### De-scoped to after merge. 
- Add simple identity generate utility command - Add simple identity validation utility command - Fix the identity validation panic ### For Reviewers: #### Recommendations: - To begin might want to read and familiarize yourself with some material under: - [acp/README](https://github.com/shahzadlone/defradb/blob/lone/acp-sourcehub-module/acp/README.md) - When looking at tests can read more about what they test in: - [ACP Test Structure](https://github.com/shahzadlone/defradb/blob/lone/acp-sourcehub-module/tests/integration/acp/README.md) - [Add Policy Tests](https://github.com/shahzadlone/defradb/blob/lone/acp-sourcehub-module/tests/integration/acp/add_policy/README.md) - [Add Schema With DPI Tests](https://github.com/shahzadlone/defradb/blob/lone/acp-sourcehub-module/tests/integration/acp/schema/add_dpi/README.md) - Would highly encourage commit by commit review. #### Commit Priorities: - Commits with label `PR(-)` are unrelated/irrelevant to this PR, can be ignored. - Commits with label `PR(ACP)` are most important to review as they are specific to ACP implementation. - Commits with label `PR(IDENTITY)` are also important as they are specific to Indentity implementation. - Commits with label `PR(*-TEST)` are test related commits that should be looked at. - Commits with label `PR(ACP-*)` are assisting ACP commits (medium priority). - Commits with label `PR(IDENTITY-*)` are assisting ACP commits (medium priority). - Commits with label `PR(WIP)` Should not exist before merge (work in progress commits). - Commits with label `PR(DROP)` Temporary commits that will be dropped before the merge. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [ ] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? - [x] There are add policy tests - [x] There are adding of permissioned schema (with dpi) tests. - [x] There are end-to-end tests with doc creation and read using identity. 
Specify the platform(s) on which this was tested: - Manjaro WSL2 --- README.md | 3 + acp/README.md | 442 ++++++++++ acp/acp.go | 100 +++ acp/acp_local.go | 310 +++++++ acp/acp_local_test.go | 654 ++++++++++++++ acp/doc.go | 17 + acp/dpi.go | 73 ++ acp/errors.go | 207 +++++ acp/identity/identity.go | 35 + cli/acp.go | 29 + cli/acp_policy.go | 25 + cli/acp_policy_add.go | 139 +++ cli/cli.go | 11 + cli/collection_create.go | 41 +- cli/collection_delete.go | 32 +- cli/collection_get.go | 22 +- cli/collection_list_doc_ids.go | 25 +- cli/collection_update.go | 37 +- cli/dump.go | 4 +- cli/errors.go | 28 +- cli/request.go | 24 +- cli/schema_add.go | 5 + cli/start.go | 13 +- cli/tx_create.go | 3 +- cli/utils.go | 7 + client/collection.go | 77 +- client/collection_description.go | 12 + client/db.go | 20 +- client/errors.go | 3 +- client/mocks/collection.go | 596 ++++++++----- client/mocks/db.go | 79 +- client/policy.go | 31 + core/key.go | 7 +- core/parser.go | 4 + db/backup.go | 16 +- db/backup_test.go | 41 +- db/collection.go | 236 ++++- db/collection_acp.go | 71 ++ db/collection_delete.go | 51 +- db/collection_get.go | 23 +- db/collection_index.go | 30 +- db/collection_update.go | 55 +- db/db.go | 74 +- db/description/collection.go | 2 +- db/errors.go | 11 + db/fetcher/fetcher.go | 92 +- db/fetcher/indexer.go | 18 +- db/fetcher/mocks/fetcher.go | 29 +- db/fetcher/mocks/utils.go | 2 + db/fetcher/versioned.go | 21 +- db/indexed_docs_test.go | 126 ++- db/permission/check.go | 91 ++ db/permission/permission.go | 32 + db/permission/register.go | 50 ++ db/request.go | 19 +- db/schema.go | 6 + db/subscriptions.go | 14 +- db/txn_db.go | 12 +- docs/cli/defradb.md | 11 +- docs/cli/defradb_client.md | 12 +- docs/cli/defradb_client_acp.md | 46 + docs/cli/defradb_client_acp_policy.md | 42 + docs/cli/defradb_client_acp_policy_add.md | 90 ++ docs/cli/defradb_client_backup.md | 11 +- docs/cli/defradb_client_backup_export.md | 11 +- docs/cli/defradb_client_backup_import.md | 11 +- docs/cli/defradb_client_collection.md | 12 +- docs/cli/defradb_client_collection_create.md | 29 +- docs/cli/defradb_client_collection_delete.md | 29 +- .../cli/defradb_client_collection_describe.md | 11 +- docs/cli/defradb_client_collection_docIDs.md | 21 +- docs/cli/defradb_client_collection_get.md | 21 +- docs/cli/defradb_client_collection_patch.md | 63 ++ docs/cli/defradb_client_collection_update.md | 32 +- docs/cli/defradb_client_dump.md | 11 +- docs/cli/defradb_client_index.md | 11 +- docs/cli/defradb_client_index_create.md | 11 +- docs/cli/defradb_client_index_drop.md | 11 +- docs/cli/defradb_client_index_list.md | 11 +- docs/cli/defradb_client_p2p.md | 11 +- docs/cli/defradb_client_p2p_collection.md | 11 +- docs/cli/defradb_client_p2p_collection_add.md | 11 +- .../defradb_client_p2p_collection_getall.md | 11 +- .../defradb_client_p2p_collection_remove.md | 11 +- docs/cli/defradb_client_p2p_info.md | 11 +- docs/cli/defradb_client_p2p_replicator.md | 11 +- .../defradb_client_p2p_replicator_delete.md | 11 +- .../defradb_client_p2p_replicator_getall.md | 11 +- docs/cli/defradb_client_p2p_replicator_set.md | 11 +- docs/cli/defradb_client_query.md | 21 +- docs/cli/defradb_client_schema.md | 11 +- docs/cli/defradb_client_schema_add.md | 16 +- docs/cli/defradb_client_schema_describe.md | 11 +- docs/cli/defradb_client_schema_migration.md | 11 +- .../defradb_client_schema_migration_down.md | 11 +- .../defradb_client_schema_migration_get.md | 41 - .../defradb_client_schema_migration_reload.md | 11 +- 
...db_client_schema_migration_set-registry.md | 11 +- .../defradb_client_schema_migration_set.md | 11 +- .../cli/defradb_client_schema_migration_up.md | 11 +- docs/cli/defradb_client_schema_patch.md | 13 +- docs/cli/defradb_client_schema_set-active.md | 11 +- docs/cli/defradb_client_schema_set-default.md | 36 - docs/cli/defradb_client_tx.md | 11 +- docs/cli/defradb_client_tx_commit.md | 11 +- docs/cli/defradb_client_tx_create.md | 11 +- docs/cli/defradb_client_tx_discard.md | 11 +- docs/cli/defradb_client_view.md | 11 +- docs/cli/defradb_client_view_add.md | 11 +- docs/cli/defradb_init.md | 37 - docs/cli/defradb_server-dump.md | 11 +- docs/cli/defradb_start.md | 11 +- docs/cli/defradb_version.md | 11 +- examples/dpi_policy/user_dpi_policy.json | 30 + examples/dpi_policy/user_dpi_policy.yml | 29 + examples/schema/permissioned/book.graphql | 14 + examples/schema/permissioned/users.graphql | 18 + go.mod | 136 ++- go.sum | 785 ++++++++++++++++- http/client.go | 10 +- http/client_acp.go | 63 ++ http/client_collection.go | 201 ++++- http/errors.go | 8 + http/handler.go | 2 + http/handler_acp.go | 83 ++ http/handler_ccip.go | 3 +- http/handler_ccip_test.go | 3 +- http/handler_collection.go | 55 +- http/handler_store.go | 5 +- http/openapi.go | 5 + http/utils.go | 50 +- lens/fetcher.go | 19 +- net/client_test.go | 5 +- net/dag_test.go | 3 +- net/errors.go | 15 +- net/node_test.go | 4 +- net/peer_collection.go | 15 +- net/peer_replicator.go | 23 +- net/peer_test.go | 166 +++- net/server.go | 16 +- net/server_test.go | 12 +- planner/create.go | 6 +- planner/delete.go | 6 +- planner/planner.go | 25 +- planner/scan.go | 2 + planner/update.go | 2 +- request/graphql/schema/collection.go | 35 + request/graphql/schema/errors.go | 6 + request/graphql/schema/manager.go | 1 + request/graphql/schema/types/types.go | 20 + tests/bench/bench_util.go | 3 +- tests/bench/collection/utils.go | 21 +- tests/bench/query/planner/utils.go | 10 +- tests/bench/query/simple/utils.go | 3 +- tests/clients/cli/wrapper.go | 33 +- tests/clients/cli/wrapper_collection.go | 152 +++- tests/clients/http/wrapper.go | 17 +- tests/gen/cli/gendocs.go | 3 +- tests/gen/cli/util_test.go | 2 +- tests/integration/acp.go | 66 ++ tests/integration/acp/README.md | 20 + tests/integration/acp/add_policy/README.md | 20 + .../integration/acp/add_policy/basic_test.go | 100 +++ tests/integration/acp/add_policy/fixture.go | 18 + .../acp/add_policy/with_empty_args_test.go | 93 ++ .../with_extra_perms_and_relations_test.go | 62 ++ .../acp/add_policy/with_extra_perms_test.go | 95 ++ .../add_policy/with_extra_relations_test.go | 107 +++ .../with_invalid_creator_arg_test.go | 75 ++ .../add_policy/with_invalid_relations_test.go | 83 ++ .../with_invalid_required_relation_test.go | 94 ++ .../add_policy/with_invalid_resource_test.go | 44 + .../add_policy/with_managed_relation_test.go | 61 ++ .../add_policy/with_multi_policies_test.go | 351 ++++++++ .../with_multiple_resources_test.go | 173 ++++ .../acp/add_policy/with_no_perms_test.go | 163 ++++ .../acp/add_policy/with_no_resources_test.go | 92 ++ .../acp/add_policy/with_perm_expr_test.go | 98 +++ .../add_policy/with_perm_invalid_expr_test.go | 137 +++ .../with_permissionless_owner_test.go | 144 ++++ .../add_policy/with_unused_relations_test.go | 58 ++ tests/integration/acp/fixture.go | 14 + tests/integration/acp/index/create_test.go | 174 ++++ tests/integration/acp/p2p/replicator_test.go | 89 ++ tests/integration/acp/p2p/subscribe_test.go | 99 +++ .../acp/register_and_delete_test.go | 514 +++++++++++ 
.../integration/acp/register_and_read_test.go | 457 ++++++++++ .../acp/register_and_update_test.go | 810 ++++++++++++++++++ .../integration/acp/schema/add_dpi/README.md | 7 + .../add_dpi/accept_basic_dpi_fmts_test.go | 214 +++++ .../accept_extra_permissions_on_dpi_test.go | 316 +++++++ .../accept_managed_relation_on_dpi_test.go | 121 +++ ...ept_mixed_resources_on_partial_dpi_test.go | 131 +++ .../schema/add_dpi/accept_multi_dpis_test.go | 183 ++++ .../accept_multi_resources_on_dpi_test.go | 281 ++++++ ...cept_same_resource_on_diff_schemas_test.go | 172 ++++ .../integration/acp/schema/add_dpi/fixture.go | 18 + .../reject_empty_arg_on_schema_test.go | 165 ++++ .../reject_invalid_arg_type_on_schema_test.go | 169 ++++ ...ect_invalid_owner_read_perm_on_dpi_test.go | 438 ++++++++++ ...alid_owner_read_perm_symbol_on_dpi_test.go | 273 ++++++ ...ct_invalid_owner_write_perm_on_dpi_test.go | 438 ++++++++++ ...lid_owner_write_perm_symbol_on_dpi_test.go | 273 ++++++ .../schema/add_dpi/reject_missing_dpi_test.go | 149 ++++ .../reject_missing_id_arg_on_schema_test.go | 165 ++++ .../reject_missing_perms_on_dpi_test.go | 97 +++ ...ect_missing_resource_arg_on_schema_test.go | 170 ++++ .../reject_missing_resource_on_dpi_test.go | 98 +++ ...ect_mixed_resources_on_partial_dpi_test.go | 117 +++ .../update/simple/with_doc_id_test.go | 44 +- .../update/simple/with_doc_ids_test.go | 53 +- .../update/simple/with_filter_test.go | 67 +- tests/integration/collection/utils.go | 3 +- .../updates/remove/policy_test.go | 82 ++ .../updates/replace/policy_test.go | 83 ++ tests/integration/db.go | 6 + .../events/simple/with_create_test.go | 5 +- .../events/simple/with_create_txn_test.go | 3 + .../events/simple/with_delete_test.go | 5 +- .../events/simple/with_update_test.go | 7 +- tests/integration/events/utils.go | 3 +- tests/integration/explain.go | 10 +- .../field_kinds/one_to_one/with_alias_test.go | 2 +- .../one_to_one/with_simple_test.go | 2 +- .../field_kinds/one_to_one/with_alias_test.go | 2 +- .../one_to_one/with_simple_test.go | 2 +- tests/integration/net/order/tcp_test.go | 16 +- tests/integration/net/order/utils.go | 89 +- tests/integration/p2p.go | 14 +- tests/integration/state.go | 1 + tests/integration/test_case.go | 32 + tests/integration/utils2.go | 99 ++- 232 files changed, 15100 insertions(+), 1177 deletions(-) create mode 100644 acp/README.md create mode 100644 acp/acp.go create mode 100644 acp/acp_local.go create mode 100644 acp/acp_local_test.go create mode 100644 acp/doc.go create mode 100644 acp/dpi.go create mode 100644 acp/errors.go create mode 100644 acp/identity/identity.go create mode 100644 cli/acp.go create mode 100644 cli/acp_policy.go create mode 100644 cli/acp_policy_add.go create mode 100644 client/policy.go create mode 100644 db/collection_acp.go create mode 100644 db/permission/check.go create mode 100644 db/permission/permission.go create mode 100644 db/permission/register.go create mode 100644 docs/cli/defradb_client_acp.md create mode 100644 docs/cli/defradb_client_acp_policy.md create mode 100644 docs/cli/defradb_client_acp_policy_add.md create mode 100644 docs/cli/defradb_client_collection_patch.md delete mode 100644 docs/cli/defradb_client_schema_migration_get.md delete mode 100644 docs/cli/defradb_client_schema_set-default.md delete mode 100644 docs/cli/defradb_init.md create mode 100644 examples/dpi_policy/user_dpi_policy.json create mode 100644 examples/dpi_policy/user_dpi_policy.yml create mode 100644 examples/schema/permissioned/book.graphql create mode 100644 
examples/schema/permissioned/users.graphql create mode 100644 http/client_acp.go create mode 100644 http/handler_acp.go create mode 100644 tests/integration/acp.go create mode 100644 tests/integration/acp/README.md create mode 100644 tests/integration/acp/add_policy/README.md create mode 100644 tests/integration/acp/add_policy/basic_test.go create mode 100644 tests/integration/acp/add_policy/fixture.go create mode 100644 tests/integration/acp/add_policy/with_empty_args_test.go create mode 100644 tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go create mode 100644 tests/integration/acp/add_policy/with_extra_perms_test.go create mode 100644 tests/integration/acp/add_policy/with_extra_relations_test.go create mode 100644 tests/integration/acp/add_policy/with_invalid_creator_arg_test.go create mode 100644 tests/integration/acp/add_policy/with_invalid_relations_test.go create mode 100644 tests/integration/acp/add_policy/with_invalid_required_relation_test.go create mode 100644 tests/integration/acp/add_policy/with_invalid_resource_test.go create mode 100644 tests/integration/acp/add_policy/with_managed_relation_test.go create mode 100644 tests/integration/acp/add_policy/with_multi_policies_test.go create mode 100644 tests/integration/acp/add_policy/with_multiple_resources_test.go create mode 100644 tests/integration/acp/add_policy/with_no_perms_test.go create mode 100644 tests/integration/acp/add_policy/with_no_resources_test.go create mode 100644 tests/integration/acp/add_policy/with_perm_expr_test.go create mode 100644 tests/integration/acp/add_policy/with_perm_invalid_expr_test.go create mode 100644 tests/integration/acp/add_policy/with_permissionless_owner_test.go create mode 100644 tests/integration/acp/add_policy/with_unused_relations_test.go create mode 100644 tests/integration/acp/fixture.go create mode 100644 tests/integration/acp/index/create_test.go create mode 100644 tests/integration/acp/p2p/replicator_test.go create mode 100644 tests/integration/acp/p2p/subscribe_test.go create mode 100644 tests/integration/acp/register_and_delete_test.go create mode 100644 tests/integration/acp/register_and_read_test.go create mode 100644 tests/integration/acp/register_and_update_test.go create mode 100644 tests/integration/acp/schema/add_dpi/README.md create mode 100644 tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go create mode 100644 tests/integration/acp/schema/add_dpi/fixture.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go create mode 100644 
tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go create mode 100644 tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go create mode 100644 tests/integration/collection_description/updates/remove/policy_test.go create mode 100644 tests/integration/collection_description/updates/replace/policy_test.go diff --git a/README.md b/README.md index a7156888b9..4924170e79 100644 --- a/README.md +++ b/README.md @@ -397,6 +397,9 @@ defradb start --tls --pubkeypath ~/path-to-pubkey.key --privkeypath ~/path-to-pr ``` +## Access Control System +Read more about access control [here](./acp/README.md). + ## Supporting CORS When accessing DefraDB through a frontend interface, you may be confronted with a CORS error. That is because, by default, DefraDB will not have any allowed origins set. To specify which origins should be allowed to access your DefraDB endpoint, you can specify them when starting the database: diff --git a/acp/README.md b/acp/README.md new file mode 100644 index 0000000000..3fb49968f8 --- /dev/null +++ b/acp/README.md @@ -0,0 +1,442 @@ +# Introduction + +In the realm of information technology (IT) and cybersecurity, **access control** plays a pivotal role in ensuring the confidentiality, integrity, and availability of sensitive resources. Let's delve into why access control policies are crucial for protecting your valuable data. + +## What Is Access Control? + +**Access control** is a mechanism that regulates who or what can view, use, or access a specific resource within a computing environment. Its primary goal is to minimize security risks by ensuring that only **authorized users**, systems, or services have access to the resources they need. But it's more than just granting or denying access; it involves several key components: + +1. **Authentication**: Verifying the identity of an individual or system. +2. **Authorization**: Determining what actions or operations an actor is allowed to perform. +3. **Access**: Granting or denying access based on authorization. +4. **Management**: Administering access rights and permissions. +5. **Audit**: Tracking and monitoring access patterns for accountability. + +## Why Is Access Control Important? + +1. **Mitigating Security Risks**: Cybercriminals are becoming increasingly sophisticated, employing advanced techniques to breach security systems. By controlling who has access to your database, you significantly reduce the risk of unauthorized access, both from external attackers and insider threats. + +2. **Compliance with Regulations**: Various regulatory requirements, such as the **General Data Protection Regulation (GDPR)** and the **Health Insurance Portability and Accountability Act (HIPAA)**, mandate stringent access control measures to protect personal data. Implementing access control ensures compliance with these regulations. + +3. **Preventing Data Breaches**: Access control acts as a proactive measure to deter, detect, and prevent unauthorized access. 
It ensures that only those with the necessary permissions can access sensitive data or services. + +4. **Managing Complexity**: Modern IT infrastructure, including cloud computing and mobile devices, has exponentially increased the number of access points. Technologies like **identity and access management (IAM)** and approaches like **zero trust** help manage this complexity effectively. + +## Types of Security Access Controls + +Several access control models exist, including: + +- **Role-Based Access Control (RBAC)**: Assigns permissions to roles; roles are then granted to users. A user's active role then defines their access (e.g., admin, user, manager). +- **Attribute-Based Access Control (ABAC)**: Considers various attributes (e.g., user attributes, resource attributes) for access decisions. +- **Discretionary Access Control (DAC)**: Users with sufficient permissions (resource owners) are able to grant / share an object with other users. +- **Mandatory Access Control (MAC)**: Users are not allowed to grant access to other users. Permissions are granted based on a minimum role / hierarchy (security labels and clearances) that must be met. +- **Policy-Based Access Control (PBAC)**: Enforces access based on defined policies. +- **Relation-Based Access Control (ReBac)**: Relations between objects and users in the system are used to derive their permissions. + +- Note: **DefraDB**'s access control rules strongly resemble **Discretionary Access Control (DAC)**, which is implemented through a **Relation-Based Access Control System (ReBac) Engine**. + +## Challenges of Access Control in Cybersecurity + +- **Distributed IT Environments**: Cloud computing and remote work create new challenges. +- **Rise of Mobility**: Mobile devices in the workplace add complexity. +- **Password Fatigue**: Balancing security with usability. +- **Data Governance**: Ensuring visibility and control. +- **Multi-Tenancy**: Managing complex permissions in SaaS applications. + +## Key takeaway +A robust access control policy system is your first line of defense against unauthorized access and data breaches. + + +# DefraDB's Access Control System + +## ReBac Authorization Model + +### Zanzibar +In 2019, Google published their [Zanzibar](https://research.google/pubs/zanzibar-googles-consistent-global-authorization-system/) paper, explaining how they handle authorization across their many services. It uses access control lists but with relationship-based access control rather than role-based access control. Relationship-Based Access Control (ReBAC) establishes an authorization model where a subject's permission to access an object is defined by the presence of relationships between those subjects and objects. +The way Zanzibar works is that it exposes an API with (mainly) operations to manage `Relationships` (`tuples`) and verify access requests (can Bob do X?) through the `Check` call. A `tuple` includes subject, relation, and object. The `Check` call performs a graph search over the `tuples` to find a path between the user and the object; if such a path exists then, according to ReBAC, the user has the queried permission. It operates as a Consistent and Partition-Tolerant system. + +### Zanzi +However, the Zanzibar API is centralized, so we (Source Network) created a decentralized implementation of Zanzibar called **Zanzi**, which is powered by our SourceHub trust protocol. Zanzi is a general-purpose Zanzibar implementation which operates over a KV persistence layer. 
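To make the Zanzibar-style `tuple` and `Check` mechanics described above concrete, here is a minimal Go sketch. It is illustrative only: these are not the actual SourceHub/Zanzi types, and a real engine evaluates permission expressions and nested subject sets while searching the tuple graph, where this sketch only matches direct edges.

```go
package main

import "fmt"

// tuple is a Zanzibar-style relationship record: subject has relation on object.
// The field names are illustrative, not the SourceHub/Zanzi API.
type tuple struct {
	subject  string // e.g. an actor
	relation string // e.g. "owner" or "reader"
	object   string // e.g. a document
}

// check answers "does subject have relation on object?" by scanning the
// stored tuples for a matching edge. Zanzibar's Check generalizes this
// into a graph search that also follows nested subject sets and
// userset-rewrite rules.
func check(tuples []tuple, subject, relation, object string) bool {
	for _, t := range tuples {
		if t.subject == subject && t.relation == relation && t.object == object {
			return true
		}
	}
	return false
}

func main() {
	// Registering an object grants its registerer the built-in "owner" relation.
	tuples := []tuple{{subject: "bob", relation: "owner", object: "users/doc-1"}}

	fmt.Println(check(tuples, "bob", "owner", "users/doc-1"))   // true: a direct path exists
	fmt.Println(check(tuples, "alice", "owner", "users/doc-1")) // false: no path for alice
}
```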
+ +### SourceHub ACP Module +DefraDB wraps the `local` and `remote` SourceHub ACP Modules to bring all that magic to DefraDB. + +In order to set up the relation-based access control, SourceHub requires an agreed-upon contract which models the `relations`, `permissions`, and `actors`. That contract is referred to as a `SourceHub Policy`. The policy models all the `relations` and `permissions` under a `resource`. +A `resource` corresponds to that "thing" that we want to gate the access control around. This can be a `Type`, `Container`, `Schema`, `Shape`, or anything that has Objects that need access control. Once the policy is finalized, it has to be uploaded to the `SourceHub Module` so it can be used. +Once the `Policy` is uploaded to the `SourceHub Module`, an `Actor` can begin registering the `Object` for access control by linking to a `Resource` that exists on the uploaded `Policy`. +After the `Object` is registered successfully, the `Actor` will then get a special built-in relation with that `Object` called the `"owner"` relation. This relation is given to the `Registerer` of an `Object`. +Then an `Actor` can issue `Check` calls to see if they have access to an `Object`. + +## Document Access Control (DAC) +In DefraDB's case, we wanted to gate access control around the `Documents` that belong to a specific `Collection`. Here, the `Collection` (i.e. the type/shape of the `Object`) can be thought of as the `Resource`, and the `Documents` are the `Objects`. + + +## Field Access Control (FAC) (coming soon) +We also want the ability to do more granular access control than just DAC. Therefore we have `Field`-level access control for situations where some fields of a `Document` need to be private, while others do not. In this case the `Document` becomes the `Resource` and the `Fields` are the `Objects` being gated. + + +## Admin Access Control (AAC) (coming soon) +We also want to model access control around the `Admin Level Operations` that exist in `DefraDB`. In this case the entire `Database` would be the `Resource` and the `Admin Level Operations` are the `Objects` being gated. + +A non-exhaustive list of some operations only admins should have access to: +- Ability to turn off ACP +- Ability to interact with the P2P system + +## SourceHub Policies Are Too Flexible +SourceHub Policies are too flexible (at least until the ability to define `Meta Policies` is implemented). This is because SourceHub leaves it up to the user to specify any type of `Permissions` and `Relations`. However, for DefraDB, there are certain guarantees that **MUST** be maintained in order for the `Policy` to be effective. For example, the user can input any name for a `Permission` or `Relation` that DefraDB has no knowledge of. Another example is when a user might make a `Policy` that does not give any `Permission` to the `owner`, which means that, in the case of DAC, no one would have any access to the `Document` they created. +Therefore, there was a very clear need to define some rules for writing a `Resource` in a `Policy` which will be used with DefraDB's DAC, FAC, or AAC. These rules guarantee that certain `Required Permissions` will always be there on a `Resource` and that the `Owner` has the correct `Permissions`. + +We call these rules DPI, a.k.a. the DefraDB Policy Interface. + +## Terminologies +- 'SourceHub Address' is a `Bech32` Address with a specific SourceHub prefix. +- 'Identity' is a combination of a SourceHub Address and a Key-Pair Signature. +- 'DPI' means 'DefraDB Policy Interface'. 
+
+## Document Access Control (DAC)
+In DefraDB's case we wanted to gate access control around the `Documents` that belong to a specific `Collection`. Here, the `Collection` (i.e. the type/shape of the `Object`) can be thought of as the `Resource`, and the `Documents` are the `Objects`.
+
+
+## Field Access Control (FAC) (coming soon)
+We also want the ability to do more granular access control than just DAC. Therefore we have `Field`-level access control for situations where some fields of a `Document` need to be private while others do not. In this case the `Document` becomes the `Resource` and the `Fields` are the `Objects` being gated.
+
+
+## Admin Access Control (AAC) (coming soon)
+We also want to model access control around the `Admin Level Operations` that exist in `DefraDB`. In this case the entire `Database` would be the `Resource` and the `Admin Level Operations` are the `Objects` being gated.
+
+A non-exhaustive list of operations only admins should have access to:
+- Ability to turn off ACP
+- Ability to interact with the P2P system
+
+## SourceHub Policies Are Too Flexible
+SourceHub Policies are too flexible (at least until the ability to define `Meta Policies` is implemented). This is because SourceHub leaves it up to the user to specify any type of `Permissions` and `Relations`. However, for DefraDB there are certain guarantees that **MUST** be maintained in order for the `Policy` to be effective. For example, the user can input any name for a `Permission` or `Relation` that DefraDB has no knowledge of. Another example is a user making a `Policy` that does not give any `Permission` to the `owner`, which in the case of DAC would mean that no one has any access to the `Document` they created.
+There was therefore a clear need to define some rules for writing a `Resource` in a `Policy` that will be used with DefraDB's DAC, FAC, or AAC. These rules guarantee that certain `Required Permissions` will always be present on a `Resource` and that the `Owner` has the correct `Permissions`.
+
+We call these rules the DPI, a.k.a. the DefraDB Policy Interface.
+
+## Terminology
+- 'SourceHub Address' is a `Bech32` address with a specific SourceHub prefix.
+- 'Identity' is a combination of a SourceHub Address and a Key-Pair Signature.
+- 'DPI' means 'DefraDB Policy Interface'.
+- 'Partially-DPI' policy means a policy with at least one DPI-compliant resource.
+- 'Permissioned Collection' means a collection with a policy, like: `@policy(id:".." resource: "..")`.
+- 'Permissioned Request' means a request made with a SourceHub Identity.
+
+
+## DAC DPI Rules
+
+To qualify as a DPI-compliant `resource`, the following rules **MUST** be satisfied:
+- The resource **must include** the mandatory `registerer` (`owner`) relation within the `relations` attribute.
+- The resource **must encompass** all the required permissions under the `permissions` attribute.
+- Every required permission must have the required registerer relation (`owner`) in its `expr`.
+- The required registerer relation **must be positioned** as the leading (first) relation in `expr` (see examples below).
+- Any relation after the required registerer relation must only be combined through a union set operation (`+`).
+
+For a `Policy` to be DPI compliant for DAC, all of its `resources` must be DPI compliant.
+To be `Partially-DPI`, at least one of its `resources` must be DPI compliant.
+
+### More Into The Weeds:
+
+All mandatory permissions are:
+- Specified in the `dpi.go` file within the variable `dpiRequiredPermissions`.
+
+The name of the required 'registerer' relation is:
+- Specified in the `dpi.go` file within the variable `requiredRegistererRelationName`.
+
+### DPI Resource Examples:
+- Check out the tests here: [tests/integration/acp/schema/add_dpi](/tests/integration/acp/schema/add_dpi)
+- The tests linked are broken into `accept_*_test.go` and `reject_*_test.go` files.
+- Accepted tests document valid DPIs (as the schema is accepted).
+- Rejected tests document invalid DPIs (as the schema is rejected).
+- There are also some Partially-DPI tests that are both accepted and rejected depending on the resource.
+
+### Required Permission's Expression:
+Even though the following expressions are valid generic policy expressions, they will make a
+DPI-compliant resource lose its DPI status, as these expressions are not in accordance with
+our DPI [rules](#dac-dpi-rules). Assuming these `expr` are under a required permission label:
+- `expr: owner-owner`
+- `expr: owner-reader`
+- `expr: owner&reader`
+- `expr: owner - reader`
+- `expr: ownerMalicious + owner`
+- `expr: ownerMalicious`
+- `expr: owner_new`
+- `expr: reader+owner`
+- `expr: reader-owner`
+- `expr: reader - owner`
+
+Here are some valid expression examples.
Assuming these `expr` are under a required permission label:
+- `expr: owner`
+- `expr: owner + reader`
+- `expr: owner +reader`
+- `expr: owner+reader`
+
+
+## DAC Usage CLI:
+
+### Adding a Policy:
+
+We have in `examples/dpi_policy/user_dpi_policy.yml`:
+```yaml
+description: A Valid DefraDB Policy Interface (DPI)
+
+actor:
+  name: actor
+
+resources:
+  users:
+    permissions:
+      read:
+        expr: owner + reader
+      write:
+        expr: owner
+
+    relations:
+      owner:
+        types:
+          - actor
+      reader:
+        types:
+          - actor
+```
+
+CLI Command:
+```sh
+defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f examples/dpi_policy/user_dpi_policy.yml
+```
+
+Result:
+```json
+{
+  "PolicyID": "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c"
+}
+```
+
+### Add schema, linking to a resource within the policy we added:
+
+We have in `examples/schema/permissioned/users.graphql`:
+```graphql
+type Users @policy(
+  id: "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c",
+  resource: "users"
+) {
+  name: String
+  age: Int
+}
+```
+
+CLI Command:
+```sh
+defradb client schema add -f examples/schema/permissioned/users.graphql
+```
+
+Result:
+```json
+[
+  {
+    "Name": "Users",
+    "ID": 1,
+    "RootID": 1,
+    "SchemaVersionID": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi",
+    "Sources": [],
+    "Fields": [
+      {
+        "Name": "_docID",
+        "ID": 0
+      },
+      {
+        "Name": "age",
+        "ID": 1
+      },
+      {
+        "Name": "name",
+        "ID": 2
+      }
+    ],
+    "Indexes": [],
+    "Policy": {
+      "ID": "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c",
+      "ResourceName": "users"
+    }
+  }
+]
+```
+
+### Create private documents (with identity)
+
+CLI Command:
+```sh
+defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users '[{ "name": "SecretShahzad" }, { "name": "SecretLone" }]'
+```
+
+### Create public documents (without identity)
+
+CLI Command:
+```sh
+defradb client collection create --name Users '[{ "name": "PublicShahzad" }, { "name": "PublicLone" }]'
+```
+
+### Get all docIDs without an identity (shows only public):
+CLI Command:
+```sh
+defradb client collection docIDs --name Users
+```
+
+Result:
+```json
+{
+  "docID": "bae-63ba68c9-78cb-5060-ab03-53ead1ec5b83",
+  "error": ""
+}
+{
+  "docID": "bae-ba315e98-fb37-5225-8a3b-34a1c75cba9e",
+  "error": ""
+}
+```
+
+
+### Get all docIDs with an identity (shows public and owned documents):
+```sh
+defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users
+```
+
+Result:
+```json
+{
+  "docID": "bae-63ba68c9-78cb-5060-ab03-53ead1ec5b83",
+  "error": ""
+}
+{
+  "docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "error": ""
+}
+{
+  "docID": "bae-ba315e98-fb37-5225-8a3b-34a1c75cba9e",
+  "error": ""
+}
+{
+  "docID": "bae-eafad571-e40c-55a7-bc41-3cf7d61ee891",
+  "error": ""
+}
+```
+
+
+### Access the private document (including field names):
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "_docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "name": "SecretShahzad"
+}
+```
+
+### Access the private document without an identity:
+CLI Command:
+```sh
+defradb client collection get --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+ Error: document not found or not authorized to access
+```
+
+### Access the private document with a wrong identity:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+ Error: document not found or not authorized to access
+```
+
+### Update private document:
+CLI Command:
+```sh
+defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users --docID "bae-a5830219-b8e7-5791-9836-2e494816fc0a" --updater '{ "name": "SecretUpdatedShahzad" }'
+```
+
+Result:
+```json
+{
+  "Count": 1,
+  "DocIDs": [
+    "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+  ]
+}
+```
+
+#### Check if it actually got updated:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "_docID": "bae-a5830219-b8e7-5791-9836-2e494816fc0a",
+  "name": "SecretUpdatedShahzad"
+}
+```
+
+### Update With Filter example (coming soon)
+
+### Delete private document:
+CLI Command:
+```sh
+defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users --docID "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Result:
+```json
+{
+  "Count": 1,
+  "DocIDs": [
+    "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+  ]
+}
+```
+
+#### Check if it actually got deleted:
+CLI Command:
+```sh
+defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name Users "bae-a5830219-b8e7-5791-9836-2e494816fc0a"
+```
+
+Error:
+```
+ Error: document not found or not authorized to access
+```
+
+### Delete With Filter example (coming soon)
+
+### Typejoin example (coming soon)
+
+### View example (coming soon)
+
+### P2P example (coming soon)
+
+### Backup / Import example (coming soon)
+
+### Secondary Indexes example (coming soon)
+
+### Execute Explain example (coming soon)
+
+
+## DAC Usage HTTP:
+HTTP requests work similarly to their CLI counterparts; the main difference is that the identity is specified within the Auth header like so: `Authorization: Basic <identity>`.
+
+Note: The `Basic` label will change to `Bearer` after JWS Authentication Tokens are supported.
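+
+For example, the private-document `get` from the CLI section above could be issued over HTTP roughly as in the Go sketch below. This is a sketch, not a verbatim recipe: the default `localhost:9181` address and the `/api/v0/graphql` endpoint are assumptions, so adjust them to your node's configuration:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+)
+
+func main() {
+	// Query the Users collection through the node's GraphQL endpoint.
+	body := strings.NewReader(`{"query": "query { Users { _docID name } }"}`)
+	req, err := http.NewRequest(http.MethodPost, "http://localhost:9181/api/v0/graphql", body)
+	if err != nil {
+		panic(err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	// The identity goes in the Auth header, mirroring the CLI's -i flag.
+	req.Header.Set("Authorization", "Basic cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j")
+
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+
+	out, _ := io.ReadAll(resp.Body)
+	fmt.Println(string(out)) // only documents this identity may read appear
+}
+```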
+
+## _AAC DPI Rules (coming soon)_
+## _AAC Usage: (coming soon)_
+
+## _FAC DPI Rules (coming soon)_
+## _FAC Usage: (coming soon)_
+
+## Warning / Caveats
+The following features currently don't work with ACP; they are being actively worked on:
+- [P2P: Adding a replicator with permissioned collection](https://github.com/sourcenetwork/defradb/issues/2366)
+- [P2P: Subscription to a permissioned collection](https://github.com/sourcenetwork/defradb/issues/2366)
+- [Adding Secondary Indexes](https://github.com/sourcenetwork/defradb/issues/2365)
+- [Backing up/Restoring Private Documents](https://github.com/sourcenetwork/defradb/issues/2430)
+
+The following features may have undefined/unstable behavior until they are properly tested:
+- [Views](https://github.com/sourcenetwork/defradb/issues/2018)
+- [Average Operations](https://github.com/sourcenetwork/defradb/issues/2475)
+- [Count Operations](https://github.com/sourcenetwork/defradb/issues/2474)
+- [Group Operations](https://github.com/sourcenetwork/defradb/issues/2473)
+- [Limit Operations](https://github.com/sourcenetwork/defradb/issues/2472)
+- [Order Operations](https://github.com/sourcenetwork/defradb/issues/2471)
+- [Sum Operations](https://github.com/sourcenetwork/defradb/issues/2470)
+- [Dag/Commit Operations](https://github.com/sourcenetwork/defradb/issues/2469)
+- [Delete With Filter Operations](https://github.com/sourcenetwork/defradb/issues/2468)
+- [Update With Filter Operations](https://github.com/sourcenetwork/defradb/issues/2467)
+- [Type Join Many Operations](https://github.com/sourcenetwork/defradb/issues/2466)
+- [Type Join One Operations](https://github.com/sourcenetwork/defradb/issues/2466)
+- [Parallel Operations](https://github.com/sourcenetwork/defradb/issues/2465)
+- [Execute Explain](https://github.com/sourcenetwork/defradb/issues/2464)
diff --git a/acp/acp.go b/acp/acp.go
new file mode 100644
index 0000000000..af99bcb86f
--- /dev/null
+++ b/acp/acp.go
@@ -0,0 +1,100 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package acp
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/corelog"
+)
+
+var (
+	log = corelog.NewLogger("acp")
+
+	// NoACP is an empty ACP; it is used to disable access control.
+	NoACP = immutable.None[ACP]()
+)
+
+// ACP is the interface to all types of access control that might exist.
+type ACP interface {
+	// Init initializes the acp with an absolute path. The provided path indicates where the
+	// persistent data will be stored for acp.
+	//
+	// If the path is empty then acp will run in memory.
+	Init(ctx context.Context, path string)
+
+	// Start starts the acp, using the initialized path. Will recover acp state
+	// from a previous run if under the same path.
+	//
+	// If the path is empty then acp will run in memory.
+	Start(ctx context.Context) error
+
+	// Close closes the resources in use by acp.
+	Close() error
+
+	// AddPolicy attempts to add the given policy. Detects the format of the policy automatically
+	// by assuming YAML format if JSON validation fails. Upon success a policyID is returned,
+	// otherwise returns error.
+	//
+	// A policy cannot be added without a creator identity (sourcehub address).
+ AddPolicy(ctx context.Context, creatorID string, policy string) (string, error) + + // ValidateResourceExistsOnValidDPI performs DPI validation of the resource (matching resource name) + // that is on the policy (matching policyID), returns an error upon validation failure. + // + // Learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + ValidateResourceExistsOnValidDPI( + ctx context.Context, + policyID string, + resourceName string, + ) error + + // RegisterDocObject registers the document (object) to have access control. + // No error is returned upon successful registering of a document. + // + // Note(s): + // - This function does not check the collection to see if the document actually exists. + // - Some documents might be created without an identity signature so they would have public access. + // - actorID here is the identity of the actor registering the document object. + RegisterDocObject( + ctx context.Context, + actorID string, + policyID string, + resourceName string, + docID string, + ) error + + // IsDocRegistered returns true if the document was found to be registered, otherwise returns false. + // If check failed then an error and false will be returned. + IsDocRegistered( + ctx context.Context, + policyID string, + resourceName string, + docID string, + ) (bool, error) + + // CheckDocAccess returns true if the check was successfull and the request has access to the document. If + // the check was successful but the request does not have access to the document, then returns false. + // Otherwise if check failed then an error is returned (and the boolean result should not be used). + // + // Note(s): + // - permission here is a valid DPI permission we are checking for ("read" or "write"). + CheckDocAccess( + ctx context.Context, + permission DPIPermission, + actorID string, + policyID string, + resourceName string, + docID string, + ) (bool, error) +} diff --git a/acp/acp_local.go b/acp/acp_local.go new file mode 100644 index 0000000000..e569efd5d0 --- /dev/null +++ b/acp/acp_local.go @@ -0,0 +1,310 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "context" + + protoTypes "github.com/cosmos/gogoproto/types" + "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/sourcehub/x/acp/embedded" + "github.com/sourcenetwork/sourcehub/x/acp/types" + "github.com/valyala/fastjson" + + "github.com/sourcenetwork/defradb/errors" +) + +var ( + _ ACP = (*ACPLocal)(nil) +) + +// ACPLocal represents a local acp implementation that makes no remote calls. +type ACPLocal struct { + pathToStore immutable.Option[string] + localACP *embedded.LocalACP +} + +func (l *ACPLocal) Init(ctx context.Context, path string) { + if path == "" { + l.pathToStore = immutable.None[string]() + } else { + l.pathToStore = immutable.Some(path) + } +} + +func (l *ACPLocal) Start(ctx context.Context) error { + var localACP embedded.LocalACP + var err error + + if !l.pathToStore.HasValue() { // Use a non-persistent, i.e. in memory store. 
+ localACP, err = embedded.NewLocalACP( + embedded.WithInMemStore(), + ) + + if err != nil { + return NewErrInitializationOfACPFailed(err, "Local", "in-memory") + } + } else { // Use peristent storage. + acpStorePath := l.pathToStore.Value() + "/" + embedded.DefaultDataDir + localACP, err = embedded.NewLocalACP( + embedded.WithPersistentStorage(acpStorePath), + ) + if err != nil { + return NewErrInitializationOfACPFailed(err, "Local", l.pathToStore.Value()) + } + } + + l.localACP = &localACP + return nil +} + +func (l *ACPLocal) Close() error { + return l.localACP.Close() +} + +func (l *ACPLocal) AddPolicy( + ctx context.Context, + creatorID string, + policy string, +) (string, error) { + // Having a creator identity is a MUST requirement for adding a policy. + if creatorID == "" { + return "", ErrPolicyCreatorMustNotBeEmpty + } + + if policy == "" { + return "", ErrPolicyDataMustNotBeEmpty + } + + // Assume policy is in YAML format by default. + policyMarshalType := types.PolicyMarshalingType_SHORT_YAML + if isJSON := fastjson.Validate(policy) == nil; isJSON { // Detect JSON format. + policyMarshalType = types.PolicyMarshalingType_SHORT_JSON + } + + createPolicy := types.MsgCreatePolicy{ + Creator: creatorID, + Policy: policy, + MarshalType: policyMarshalType, + CreationTime: protoTypes.TimestampNow(), + } + + createPolicyResponse, err := l.localACP.GetMsgService().CreatePolicy( + l.localACP.GetCtx(), + &createPolicy, + ) + + if err != nil { + return "", NewErrFailedToAddPolicyWithACP(err, "Local", creatorID) + } + + policyID := createPolicyResponse.Policy.Id + log.InfoContext(ctx, "Created Policy", corelog.Any("PolicyID", policyID)) + + return policyID, nil +} + +func (l *ACPLocal) ValidateResourceExistsOnValidDPI( + ctx context.Context, + policyID string, + resourceName string, +) error { + if policyID == "" && resourceName == "" { + return ErrNoPolicyArgs + } + + if policyID == "" { + return ErrPolicyIDMustNotBeEmpty + } + + if resourceName == "" { + return ErrResourceNameMustNotBeEmpty + } + + queryPolicyRequest := types.QueryPolicyRequest{Id: policyID} + queryPolicyResponse, err := l.localACP.GetQueryService().Policy( + l.localACP.GetCtx(), + &queryPolicyRequest, + ) + + if err != nil { + if errors.Is(err, types.ErrPolicyNotFound) { + return newErrPolicyDoesNotExistWithACP(err, policyID) + } else { + return newErrPolicyValidationFailedWithACP(err, policyID) + } + } + + // So far we validated that the policy exists, now lets validate that resource exists. + resourceResponse := queryPolicyResponse.Policy.GetResourceByName(resourceName) + if resourceResponse == nil { + return newErrResourceDoesNotExistOnTargetPolicy(resourceName, policyID) + } + + // Now that we have validated that policyID exists and it contains a corresponding + // resource with the matching name, validate that all required permissions + // for DPI actually exist on the target resource. + for _, requiredPermission := range dpiRequiredPermissions { + permissionResponse := resourceResponse.GetPermissionByName(requiredPermission) + if permissionResponse == nil { + return newErrResourceIsMissingRequiredPermission( + resourceName, + requiredPermission, + policyID, + ) + } + + // Now we need to ensure that the "owner" relation has access to all the required + // permissions for DPI. This is important because even if the policy has the required + // permissions under the resource, it's possible that those permissions are not granted + // to the "owner" relation, this will help users not shoot themseleves in the foot. 
+ // TODO-ACP: Better validation, once sourcehub implements meta-policies. + // Issue: https://github.com/sourcenetwork/defradb/issues/2359 + if err := validateDPIExpressionOfRequiredPermission( + permissionResponse.Expression, + requiredPermission, + ); err != nil { + return err + } + } + + return nil +} + +func (l *ACPLocal) RegisterDocObject( + ctx context.Context, + actorID string, + policyID string, + resourceName string, + docID string, +) error { + registerDoc := types.MsgRegisterObject{ + Creator: actorID, + PolicyId: policyID, + Object: types.NewObject(resourceName, docID), + CreationTime: protoTypes.TimestampNow(), + } + + registerDocResponse, err := l.localACP.GetMsgService().RegisterObject( + l.localACP.GetCtx(), + ®isterDoc, + ) + + if err != nil { + return NewErrFailedToRegisterDocWithACP(err, "Local", policyID, actorID, resourceName, docID) + } + + switch registerDocResponse.Result { + case types.RegistrationResult_NoOp: + return ErrObjectDidNotRegister + + case types.RegistrationResult_Registered: + log.InfoContext( + ctx, + "Document registered with local acp", + corelog.Any("PolicyID", policyID), + corelog.Any("Creator", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return nil + + case types.RegistrationResult_Unarchived: + log.InfoContext( + ctx, + "Document re-registered (unarchived object) with local acp", + corelog.Any("PolicyID", policyID), + corelog.Any("Creator", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return nil + } + + return ErrObjectDidNotRegister +} + +func (l *ACPLocal) IsDocRegistered( + ctx context.Context, + policyID string, + resourceName string, + docID string, +) (bool, error) { + queryObjectOwner := types.QueryObjectOwnerRequest{ + PolicyId: policyID, + Object: types.NewObject(resourceName, docID), + } + + queryObjectOwnerResponse, err := l.localACP.GetQueryService().ObjectOwner( + l.localACP.GetCtx(), + &queryObjectOwner, + ) + if err != nil { + return false, NewErrFailedToCheckIfDocIsRegisteredWithACP(err, "Local", policyID, resourceName, docID) + } + + return queryObjectOwnerResponse.IsRegistered, nil +} + +func (l *ACPLocal) CheckDocAccess( + ctx context.Context, + permission DPIPermission, + actorID string, + policyID string, + resourceName string, + docID string, +) (bool, error) { + checkDoc := types.QueryVerifyAccessRequestRequest{ + PolicyId: policyID, + AccessRequest: &types.AccessRequest{ + Operations: []*types.Operation{ + { + Object: types.NewObject(resourceName, docID), + Permission: permission.String(), + }, + }, + Actor: &types.Actor{ + Id: actorID, + }, + }, + } + + checkDocResponse, err := l.localACP.GetQueryService().VerifyAccessRequest( + l.localACP.GetCtx(), + &checkDoc, + ) + if err != nil { + return false, NewErrFailedToVerifyDocAccessWithACP(err, "Local", policyID, actorID, resourceName, docID) + } + + if checkDocResponse.Valid { + log.InfoContext( + ctx, + "Document accessible", + corelog.Any("PolicyID", policyID), + corelog.Any("ActorID", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return true, nil + } else { + log.InfoContext( + ctx, + "Document inaccessible", + corelog.Any("PolicyID", policyID), + corelog.Any("ActorID", actorID), + corelog.Any("Resource", resourceName), + corelog.Any("DocID", docID), + ) + return false, nil + } +} diff --git a/acp/acp_local_test.go b/acp/acp_local_test.go new file mode 100644 index 0000000000..9abdcb04d1 --- /dev/null +++ b/acp/acp_local_test.go @@ -0,0 +1,654 
@@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +var identity1 = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" +var identity2 = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" + +var validPolicyID string = "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" +var validPolicy string = ` +description: a policy + +actor: + name: actor + +resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + ` + +func Test_LocalACP_InMemory_StartAndClose_NoError(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + err := localACP.Start(ctx) + + require.Nil(t, err) + + err = localACP.Close() + require.Nil(t, err) +} + +func Test_LocalACP_PersistentMemory_StartAndClose_NoError(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + err := localACP.Start(ctx) + require.Nil(t, err) + + err = localACP.Close() + require.Nil(t, err) +} + +func Test_LocalACP_InMemory_AddPolicy_CanCreateTwice(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose := localACP.Close() + require.Nil(t, errClose) + + // Since nothing is persisted should allow adding same policy again. + + localACP.Init(ctx, "") + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy = localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_AddPolicy_CanNotCreateTwice(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errClose := localACP.Close() + require.Nil(t, errClose) + + // The above policy should remain persisted on restarting ACP. + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Should not allow us to create the same policy again as it exists already. 
+ _, errAddPolicy = localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Error(t, errAddPolicy) + require.ErrorIs(t, errAddPolicy, ErrFailedToAddPolicyWithACP) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_ValidateResourseExistsOrNot_ErrIfDoesntExist(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errValidateResourceExists := localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrResourceDoesNotExistOnTargetPolicy) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + "invalidPolicyID", + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrPolicyDoesNotExistWithACP) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_ValidateResourseExistsOrNot_ErrIfDoesntExist(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + errValidateResourceExists := localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + // Resource should still exist even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Do the same check after restart. + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "users", + ) + require.Nil(t, errValidateResourceExists) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + validPolicyID, + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrResourceDoesNotExistOnTargetPolicy) + + errValidateResourceExists = localACP.ValidateResourceExistsOnValidDPI( + ctx, + "invalidPolicyID", + "resourceDoesNotExist", + ) + require.Error(t, errValidateResourceExists) + require.ErrorIs(t, errValidateResourceExists, ErrPolicyDoesNotExistWithACP) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_IsDocRegistered_TrueIfRegisteredFalseIfNotAndErrorOtherwise(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty doc and empty resource can't be registered. 
+ errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errRegisterDoc) + require.ErrorIs(t, errRegisterDoc, ErrFailedToRegisterDocWithACP) + + // Check if an invalid empty doc and empty resource is registered. + isDocRegistered, errDocRegistered := localACP.IsDocRegistered( + ctx, + validPolicyID, + "", + "", + ) + require.Error(t, errDocRegistered) + require.ErrorIs(t, errDocRegistered, ErrFailedToCheckIfDocIsRegisteredWithACP) + require.False(t, isDocRegistered) + + // No documents are registered right now so return false. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errDocRegistered) + require.False(t, isDocRegistered) + + // Register a document. + errRegisterDoc = localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now it should be registered. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_IsDocRegistered_TrueIfRegisteredFalseIfNotAndErrorOtherwise(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty doc and empty resource can't be registered. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errRegisterDoc) + require.ErrorIs(t, errRegisterDoc, ErrFailedToRegisterDocWithACP) + + // Check if an invalid empty doc and empty resource is registered. + isDocRegistered, errDocRegistered := localACP.IsDocRegistered( + ctx, + validPolicyID, + "", + "", + ) + require.Error(t, errDocRegistered) + require.ErrorIs(t, errDocRegistered, ErrFailedToCheckIfDocIsRegisteredWithACP) + require.False(t, isDocRegistered) + + // No documents are registered right now so return false. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errDocRegistered) + require.False(t, isDocRegistered) + + // Register a document. + errRegisterDoc = localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now it should be registered. + isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + // Should stay registered even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Check after restart if it is still registered. 
+ isDocRegistered, errDocRegistered = localACP.IsDocRegistered( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + ) + + require.Nil(t, errDocRegistered) + require.True(t, isDocRegistered) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_InMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherwise(t *testing.T) { + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty arguments such that we can't check doc access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + + // Check document accesss for a document that does not exist. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now check using correct identity if it has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check using wrong identity, it should not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherwise(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + var localACP ACPLocal + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Invalid empty arguments such that we can't check doc access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + + // Check document accesss for a document that does not exist. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Now check using correct identity if it has access. 
+ hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check using wrong identity, it should not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // identities should continue having their correct behaviour and access even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Now check again after the restart using correct identity if it still has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Now check again after restart using wrong identity, it should continue to not have access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose = localACP.Close() + require.Nil(t, errClose) +} diff --git a/acp/doc.go b/acp/doc.go new file mode 100644 index 0000000000..3fd60dd147 --- /dev/null +++ b/acp/doc.go @@ -0,0 +1,17 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +/* +Package acp utilizes the sourcehub acp module to bring the functionality +to defradb, this package also helps avoid the leakage of direct sourcehub +references through out the code base, and eases in swapping between local +use case and a more global on sourcehub use case. +*/ +package acp diff --git a/acp/dpi.go b/acp/dpi.go new file mode 100644 index 0000000000..85da972131 --- /dev/null +++ b/acp/dpi.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package acp + +import ( + "strings" +) + +type DPIPermission int + +// Valid DefraDB Policy Interface Permission Type. +const ( + ReadPermission DPIPermission = iota + WritePermission +) + +// List of all valid DPI permissions, the order of permissions in this list must match +// the above defined ordering such that iota matches the index position within the list. +var dpiRequiredPermissions = []string{ + "read", + "write", +} + +func (dpiPermission DPIPermission) String() string { + return dpiRequiredPermissions[dpiPermission] +} + +const requiredRegistererRelationName string = "owner" + +// validateDPIExpressionOfRequiredPermission validates that the expression under the +// permission is valid. 
Moreover, DPI requires that for all required permissions, the
+// expression start with "owner", followed by a space or symbol, and then the follow-up expression.
+// This is important because even if the policy has the required permissions under the
+// resource, it's still possible that those permissions are not granted to the "owner"
+// relation. This validation will help users not shoot themselves in the foot.
+//
+// Learn more about the DefraDB Policy Interface [ACP](/acp/README.md); more detailed
+// valid and invalid `expr` (expression) examples can be found there.
+func validateDPIExpressionOfRequiredPermission(expression string, requiredPermission string) error {
+	exprNoSpace := strings.ReplaceAll(expression, " ", "")
+
+	if !strings.HasPrefix(exprNoSpace, requiredRegistererRelationName) {
+		return newErrExprOfRequiredPermissionMustStartWithRelation(
+			requiredPermission,
+			requiredRegistererRelationName,
+		)
+	}
+
+	restOfTheExpr := exprNoSpace[len(requiredRegistererRelationName):]
+	if len(restOfTheExpr) != 0 {
+		c := restOfTheExpr[0]
+		// The first character after the required relation name MUST be a `+`.
+		// The reason we are enforcing this here is because other set operations are
+		// not applied to the registerer relation anyways.
+		if c != '+' {
+			return newErrExprOfRequiredPermissionHasInvalidChar(
+				requiredPermission,
+				requiredRegistererRelationName,
+				c,
+			)
+		}
+	}
+
+	return nil
+}
diff --git a/acp/errors.go b/acp/errors.go
new file mode 100644
index 0000000000..307b32f5ad
--- /dev/null
+++ b/acp/errors.go
@@ -0,0 +1,207 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package acp + +import ( + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errInitializationOfACPFailed = "initialization of acp failed" + errStartingACPInEmptyPath = "starting acp in an empty path" + errFailedToAddPolicyWithACP = "failed to add policy with acp" + errFailedToRegisterDocWithACP = "failed to register document with acp" + errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" + errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" + + errObjectDidNotRegister = "no-op while registering object (already exists or error) with acp" + errNoPolicyArgs = "missing policy arguments, must have both id and resource" + + errPolicyIDMustNotBeEmpty = "policyID must not be empty" + errPolicyDoesNotExistWithACP = "policyID specified does not exist with acp" + errPolicyValidationFailedWithACP = "policyID validation through acp failed" + + errResourceNameMustNotBeEmpty = "resource name must not be empty" + errResourceDoesNotExistOnTargetPolicy = "resource does not exist on the specified policy" + errResourceIsMissingRequiredPermission = "resource is missing required permission on policy" + + errExprOfRequiredPermMustStartWithRelation = "expr of required permission must start with required relation" + errExprOfRequiredPermHasInvalidChar = "expr of required permission has invalid character after relation" +) + +var ( + ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) + ErrFailedToAddPolicyWithACP = errors.New(errFailedToAddPolicyWithACP) + ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) + ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) + ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) + ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) + + ErrResourceDoesNotExistOnTargetPolicy = errors.New(errResourceDoesNotExistOnTargetPolicy) + + ErrPolicyDataMustNotBeEmpty = errors.New("policy data can not be empty") + ErrPolicyCreatorMustNotBeEmpty = errors.New("policy creator can not be empty") + ErrObjectDidNotRegister = errors.New(errObjectDidNotRegister) + ErrNoPolicyArgs = errors.New(errNoPolicyArgs) + ErrPolicyIDMustNotBeEmpty = errors.New(errPolicyIDMustNotBeEmpty) + ErrResourceNameMustNotBeEmpty = errors.New(errResourceNameMustNotBeEmpty) +) + +func NewErrInitializationOfACPFailed( + inner error, + Type string, + path string, +) error { + return errors.Wrap( + errInitializationOfACPFailed, + inner, + errors.NewKV("Type", Type), + errors.NewKV("Path", path), + ) +} + +func NewErrFailedToAddPolicyWithACP( + inner error, + Type string, + creatorID string, +) error { + return errors.Wrap( + errFailedToAddPolicyWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("CreatorID", creatorID), + ) +} + +func NewErrFailedToRegisterDocWithACP( + inner error, + Type string, + policyID string, + creatorID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToRegisterDocWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("CreatorID", creatorID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func NewErrFailedToCheckIfDocIsRegisteredWithACP( + inner error, + Type string, + policyID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToCheckIfDocIsRegisteredWithACP, + inner, + errors.NewKV("Type", Type), + 
errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func NewErrFailedToVerifyDocAccessWithACP( + inner error, + Type string, + policyID string, + actorID string, + resourceName string, + docID string, +) error { + return errors.Wrap( + errFailedToVerifyDocAccessWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("ActorID", actorID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + ) +} + +func newErrPolicyDoesNotExistWithACP( + inner error, + policyID string, +) error { + return errors.Wrap( + errPolicyDoesNotExistWithACP, + inner, + errors.NewKV("PolicyID", policyID), + ) +} + +func newErrPolicyValidationFailedWithACP( + inner error, + policyID string, +) error { + return errors.Wrap( + errPolicyValidationFailedWithACP, + inner, + errors.NewKV("PolicyID", policyID), + ) +} + +func newErrResourceDoesNotExistOnTargetPolicy( + resourceName string, + policyID string, +) error { + return errors.New( + errResourceDoesNotExistOnTargetPolicy, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + ) +} + +func newErrResourceIsMissingRequiredPermission( + resourceName string, + permission string, + policyID string, +) error { + return errors.New( + errResourceIsMissingRequiredPermission, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("Permission", permission), + ) +} + +func newErrExprOfRequiredPermissionMustStartWithRelation( + permission string, + relation string, +) error { + return errors.New( + errExprOfRequiredPermMustStartWithRelation, + errors.NewKV("Permission", permission), + errors.NewKV("Relation", relation), + ) +} + +func newErrExprOfRequiredPermissionHasInvalidChar( + permission string, + relation string, + char byte, +) error { + return errors.New( + errExprOfRequiredPermHasInvalidChar, + errors.NewKV("Permission", permission), + errors.NewKV("Relation", relation), + errors.NewKV("Character", string(char)), + ) +} diff --git a/acp/identity/identity.go b/acp/identity/identity.go new file mode 100644 index 0000000000..ba6efb71fa --- /dev/null +++ b/acp/identity/identity.go @@ -0,0 +1,35 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +/* +Package identity provides defradb identity. +*/ + +package identity + +import ( + "github.com/sourcenetwork/immutable" +) + +var ( + // NoIdentity is an empty identity. + NoIdentity = immutable.None[string]() +) + +// NewIdentity makes a new identity if the input is not empty otherwise, returns an empty Option. +func NewIdentity(identity string) immutable.Option[string] { + // TODO-ACP: There will be more validation once sourcehub gets some utilities. + // Then a validation function would do the validation, will likely do outside this function. 
+ // https://github.com/sourcenetwork/defradb/issues/2358 + if identity == "" { + return NoIdentity + } + return immutable.Some[string](identity) +} diff --git a/cli/acp.go b/cli/acp.go new file mode 100644 index 0000000000..30705ac908 --- /dev/null +++ b/cli/acp.go @@ -0,0 +1,29 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeACPCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "acp", + Short: "Interact with the access control system of a DefraDB node", + Long: `Interact with the access control system of a DefraDB node + +Learn more about [ACP](/acp/README.md) + + `, + } + + return cmd +} diff --git a/cli/acp_policy.go b/cli/acp_policy.go new file mode 100644 index 0000000000..92ae9321f0 --- /dev/null +++ b/cli/acp_policy.go @@ -0,0 +1,25 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeACPPolicyCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "policy", + Short: "Interact with the acp policy features of DefraDB instance", + Long: `Interact with the acp policy features of DefraDB instance`, + } + + return cmd +} diff --git a/cli/acp_policy_add.go b/cli/acp_policy_add.go new file mode 100644 index 0000000000..01914b37c6 --- /dev/null +++ b/cli/acp_policy_add.go @@ -0,0 +1,139 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "io" + "os" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/acp" +) + +func MakeACPPolicyAddCommand() *cobra.Command { + const identityFlagLongRequired string = "identity" + const identityFlagShortRequired string = "i" + + const fileFlagLong string = "file" + const fileFlagShort string = "f" + + var identityValue string + var policyFile string + + var cmd = &cobra.Command{ + Use: "add [-i --identity] [policy]", + Short: "Add new policy", + Long: `Add new policy + +Notes: + - Can not add a policy without specifying an identity. + - ACP must be available (i.e. ACP can not be disabled). + - A non-DPI policy will be accepted (will be registered with acp system). + - But only a valid DPI policyID & resource can be specified on a schema. + - DPI validation happens when attempting to add a schema with '@policy'. 
+ - Learn more about [ACP & DPI Rules](/acp/README.md) + +Example: add from an argument string: + defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j ' +description: A Valid DefraDB Policy Interface + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +' + +Example: add from file: + defradb client acp policy add -i cosmos17r39df0hdcrgnmmw4mvu7qgk5nu888c7uvv37y -f policy.yml + +Example: add from file, verbose flags: + defradb client acp policy add --identity cosmos1kpw734v54g0t0d8tcye8ee5jc3gld0tcr2q473 --file policy.yml + +Example: add from stdin: + cat policy.yml | defradb client acp policy add - + +`, + RunE: func(cmd *cobra.Command, args []string) error { + if identityValue == "" { + return acp.ErrPolicyCreatorMustNotBeEmpty + } + + // TODO-ACP: Ensure here (before going through acp system) if the required identity argument + // is valid, if it is valid then keep proceeding further, otherwise return this error: + // `NewErrRequiredFlagInvalid(identityFlagLongRequired, identityFlagShortRequired)` + // Issue: https://github.com/sourcenetwork/defradb/issues/2358 + + // Handle policy argument. + extraArgsProvided := len(args) + var policy string + switch { + case policyFile != "": + data, err := os.ReadFile(policyFile) + if err != nil { + return err + } + policy = string(data) + + case extraArgsProvided > 0 && args[extraArgsProvided-1] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + policy = string(data) + + case extraArgsProvided > 0: + policy = args[0] + + default: + return ErrPolicyFileArgCanNotBeEmpty + } + + db := mustGetContextDB(cmd) + policyResult, err := db.AddPolicy( + cmd.Context(), + identityValue, + policy, + ) + + if err != nil { + return err + } + + return writeJSON(cmd, policyResult) + }, + } + cmd.Flags().StringVarP(&policyFile, fileFlagLong, fileFlagShort, "", "File to load a policy from") + cmd.Flags().StringVarP( + &identityValue, + identityFlagLongRequired, + identityFlagShortRequired, + "", + "[Required] Identity of the creator", + ) + _ = cmd.MarkFlagRequired(identityFlagLongRequired) + + return cmd +} diff --git a/cli/cli.go b/cli/cli.go index 33f58ac8e9..38209a9f69 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -62,6 +62,16 @@ func NewDefraCommand() *cobra.Command { schema_migrate, ) + policy := MakeACPPolicyCommand() + policy.AddCommand( + MakeACPPolicyAddCommand(), + ) + + acp := MakeACPCommand() + acp.AddCommand( + policy, + ) + view := MakeViewCommand() view.AddCommand( MakeViewAddCommand(), @@ -103,6 +113,7 @@ func NewDefraCommand() *cobra.Command { MakeDumpCommand(), MakeRequestCommand(), schema, + acp, view, index, p2p, diff --git a/cli/collection_create.go b/cli/collection_create.go index efeee61494..0af57d77ed 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -16,34 +16,41 @@ import ( "github.com/spf13/cobra" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionCreateCommand() *cobra.Command { + const identityFlagLongRequired string = "identity" + const identityFlagShortRequired string = "i" + + var identityValue string var file string + var cmd = &cobra.Command{ - Use: "create ", + Use: "create [-i --identity] ", Short: "Create a new document.", Long: `Create a new document. 
-Example: create from string +Example: create from string: defradb client collection create --name User '{ "name": "Bob" }' -Example: create multiple from string +Example: create from string, with identity: + defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User '{ "name": "Bob" }' + +Example: create multiple from string: defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]' -Example: create from file +Example: create from file: defradb client collection create --name User -f document.json -Example: create from stdin +Example: create from stdin: cat document.json | defradb client collection create --name User - `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - col, ok := tryGetContextCollection(cmd) - if !ok { - return cmd.Usage() - } + // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. + identity := acpIdentity.NewIdentity(identityValue) var docData []byte switch { @@ -65,21 +72,33 @@ Example: create from stdin return ErrNoDocOrFile } + col, ok := tryGetContextCollection(cmd) + if !ok { + return cmd.Usage() + } + if client.IsJSONArray(docData) { docs, err := client.NewDocsFromJSON(docData, col.Schema()) if err != nil { return err } - return col.CreateMany(cmd.Context(), docs) + return col.CreateMany(cmd.Context(), identity, docs) } doc, err := client.NewDocFromJSON(docData, col.Schema()) if err != nil { return err } - return col.Create(cmd.Context(), doc) + return col.Create(cmd.Context(), identity, doc) }, } cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") + cmd.Flags().StringVarP( + &identityValue, + identityFlagLongRequired, + identityFlagShortRequired, + "", + "Identity of the actor", + ) return cmd } diff --git a/cli/collection_delete.go b/cli/collection_delete.go index d1f945d9ae..1d1c128948 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -13,24 +13,35 @@ package cli import ( "github.com/spf13/cobra" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionDeleteCommand() *cobra.Command { + const identityFlagLongRequired string = "identity" + const identityFlagShortRequired string = "i" + + var identityValue string var argDocIDs []string var filter string var cmd = &cobra.Command{ - Use: "delete [--filter --docID ]", + Use: "delete [-i --identity] [--filter --docID ]", Short: "Delete documents by docID or filter.", Long: `Delete documents by docID or filter and lists the number of documents deleted. -Example: delete by docID(s) - defradb client collection delete --name User --docID bae-123,bae-456 +Example: delete by docID(s): + defradb client collection delete --name User --docID bae-123,bae-456 + +Example: delete by docID(s) with identity: + defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123,bae-456 -Example: delete by filter +Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' `, RunE: func(cmd *cobra.Command, args []string) error { + // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. 
+			identity := acpIdentity.NewIdentity(identityValue)
+
 			col, ok := tryGetContextCollection(cmd)
 			if !ok {
 				return cmd.Usage()
 			}
@@ -42,7 +53,7 @@ Example: delete by filter
 				if err != nil {
 					return err
 				}
-				res, err := col.DeleteWithDocID(cmd.Context(), docID)
+				res, err := col.DeleteWithDocID(cmd.Context(), identity, docID)
 				if err != nil {
 					return err
 				}
@@ -56,13 +67,13 @@ Example: delete by filter
 				}
 				docIDs[i] = docID
 			}
-			res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs)
+			res, err := col.DeleteWithDocIDs(cmd.Context(), identity, docIDs)
 			if err != nil {
 				return err
 			}
 			return writeJSON(cmd, res)
 		case filter != "":
-			res, err := col.DeleteWithFilter(cmd.Context(), filter)
+			res, err := col.DeleteWithFilter(cmd.Context(), identity, filter)
 			if err != nil {
 				return err
 			}
@@ -74,5 +85,12 @@ Example: delete by filter
 	}
 	cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID")
 	cmd.Flags().StringVar(&filter, "filter", "", "Document filter")
+	cmd.Flags().StringVarP(
+		&identityValue,
+		identityFlagLongRequired,
+		identityFlagShortRequired,
+		"",
+		"Identity of the actor",
+	)
 	return cmd
}
diff --git a/cli/collection_get.go b/cli/collection_get.go
index 55c84d6289..1a924ea1aa 100644
--- a/cli/collection_get.go
+++ b/cli/collection_get.go
@@ -13,21 +13,32 @@ package cli
 import (
 	"github.com/spf13/cobra"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 )
 
 func MakeCollectionGetCommand() *cobra.Command {
+	const identityFlagLongRequired string = "identity"
+	const identityFlagShortRequired string = "i"
+
+	var identityValue string
 	var showDeleted bool
 	var cmd = &cobra.Command{
-		Use:   "get [--show-deleted] <docID>",
+		Use:   "get [-i --identity] [--show-deleted] <docID>",
 		Short: "View document fields.",
 		Long: `View document fields.
 
Example:
   defradb client collection get --name User bae-123
+
+Example: get a private document, with identity:
+  defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User bae-123
 `,
 		Args: cobra.ExactArgs(1),
 		RunE: func(cmd *cobra.Command, args []string) error {
+			// TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here.
+			identity := acpIdentity.NewIdentity(identityValue)
+
 			col, ok := tryGetContextCollection(cmd)
 			if !ok {
 				return cmd.Usage()
 			}
@@ -37,7 +48,7 @@ Example:
 			if err != nil {
 				return err
 			}
-			doc, err := col.Get(cmd.Context(), docID, showDeleted)
+			doc, err := col.Get(cmd.Context(), identity, docID, showDeleted)
 			if err != nil {
 				return err
 			}
@@ -49,5 +60,12 @@ Example:
 		},
 	}
 	cmd.Flags().BoolVar(&showDeleted, "show-deleted", false, "Show deleted documents")
+	cmd.Flags().StringVarP(
+		&identityValue,
+		identityFlagLongRequired,
+		identityFlagShortRequired,
+		"",
+		"Identity of the actor",
+	)
 	return cmd
}
diff --git a/cli/collection_list_doc_ids.go b/cli/collection_list_doc_ids.go
index 7112a88817..10f6d879bf 100644
--- a/cli/collection_list_doc_ids.go
+++ b/cli/collection_list_doc_ids.go
@@ -13,25 +13,37 @@ package cli
 import (
 	"github.com/spf13/cobra"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/http"
 )
 
 func MakeCollectionListDocIDsCommand() *cobra.Command {
+	const identityFlagLongRequired string = "identity"
+	const identityFlagShortRequired string = "i"
+
+	var identityValue string
+
 	var cmd = &cobra.Command{
-		Use:   "docIDs",
+		Use:   "docIDs [-i --identity]",
 		Short: "List all document IDs (docIDs).",
 		Long: `List all document IDs (docIDs).
-Example:
+Example: list all docID(s):
   defradb client collection docIDs --name User
+
+Example: list all docID(s), with an identity:
+  defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User
 `,
 		RunE: func(cmd *cobra.Command, args []string) error {
+			// TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here.
+			identity := acpIdentity.NewIdentity(identityValue)
+
 			col, ok := tryGetContextCollection(cmd)
 			if !ok {
 				return cmd.Usage()
 			}
-			docCh, err := col.GetAllDocIDs(cmd.Context())
+			docCh, err := col.GetAllDocIDs(cmd.Context(), identity)
 			if err != nil {
 				return err
 			}
@@ -49,5 +61,12 @@ Example:
 			return nil
 		},
 	}
+	cmd.Flags().StringVarP(
+		&identityValue,
+		identityFlagLongRequired,
+		identityFlagShortRequired,
+		"",
+		"Identity of the actor",
+	)
 	return cmd
}
diff --git a/cli/collection_update.go b/cli/collection_update.go
index 42354948a9..816cad8029 100644
--- a/cli/collection_update.go
+++ b/cli/collection_update.go
@@ -13,31 +13,43 @@ package cli
 import (
 	"github.com/spf13/cobra"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 )
 
 func MakeCollectionUpdateCommand() *cobra.Command {
+	const identityFlagLongRequired string = "identity"
+	const identityFlagShortRequired string = "i"
+
+	var identityValue string
 	var argDocIDs []string
 	var filter string
 	var updater string
 	var cmd = &cobra.Command{
-		Use:   "update [--filter <filter> --docID <docID> --updater <updater>] <document>",
+		Use:   "update [-i --identity] [--filter <filter> --docID <docID> --updater <updater>] <document>",
 		Short: "Update documents by docID or filter.",
 		Long: `Update documents by docID or filter.
 
-Example: update from string
+Example: update from string:
   defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }'
 
-Example: update by filter
+Example: update by filter:
   defradb client collection update --name User \
   --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }'
 
-Example: update by docIDs
+Example: update by docIDs:
   defradb client collection update --name User \
+  --docID bae-123,bae-456 --updater '{ "verified": true }'
+
+Example: update private docIDs, with identity:
+  defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User \
   --docID bae-123,bae-456 --updater '{ "verified": true }'
 `,
 		Args: cobra.RangeArgs(0, 1),
 		RunE: func(cmd *cobra.Command, args []string) error {
+			// TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here.
+ identity := acpIdentity.NewIdentity(identityValue) + col, ok := tryGetContextCollection(cmd) if !ok { return cmd.Usage() @@ -49,7 +61,7 @@ Example: update by docIDs if err != nil { return err } - res, err := col.UpdateWithDocID(cmd.Context(), docID, updater) + res, err := col.UpdateWithDocID(cmd.Context(), identity, docID, updater) if err != nil { return err } @@ -63,13 +75,13 @@ Example: update by docIDs } docIDs[i] = docID } - res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater) + res, err := col.UpdateWithDocIDs(cmd.Context(), identity, docIDs, updater) if err != nil { return err } return writeJSON(cmd, res) case filter != "" && updater != "": - res, err := col.UpdateWithFilter(cmd.Context(), filter, updater) + res, err := col.UpdateWithFilter(cmd.Context(), identity, filter, updater) if err != nil { return err } @@ -79,14 +91,14 @@ Example: update by docIDs if err != nil { return err } - doc, err := col.Get(cmd.Context(), docID, true) + doc, err := col.Get(cmd.Context(), identity, docID, true) if err != nil { return err } if err := doc.SetWithJSON([]byte(args[0])); err != nil { return err } - return col.Update(cmd.Context(), doc) + return col.Update(cmd.Context(), identity, doc) default: return ErrNoDocIDOrFilter } @@ -95,5 +107,12 @@ Example: update by docIDs cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") + cmd.Flags().StringVarP( + &identityValue, + identityFlagLongRequired, + identityFlagShortRequired, + "", + "Identity of the actor", + ) return cmd } diff --git a/cli/dump.go b/cli/dump.go index a3d155605b..76b36bab99 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeDumpCommand() *cobra.Command { @@ -21,7 +19,7 @@ func MakeDumpCommand() *cobra.Command { Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { - db := cmd.Context().Value(dbContextKey).(client.DB) + db := mustGetContextDB(cmd) return db.PrintDump(cmd.Context()) }, } diff --git a/cli/errors.go b/cli/errors.go index bb124bc7f9..02cd252b59 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -11,25 +11,37 @@ package cli import ( + "fmt" + "github.com/sourcenetwork/defradb/errors" ) const ( errInvalidLensConfig string = "invalid lens configuration" errSchemaVersionNotOfSchema string = "the given schema version is from a different schema" + errRequiredFlag string = "the required flag [--%s|-%s] is %s" ) var ( - ErrNoDocOrFile = errors.New("document or file must be defined") - ErrInvalidDocument = errors.New("invalid document") - ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") - ErrInvalidExportFormat = errors.New("invalid export format") - ErrNoLensConfig = errors.New("lens config cannot be empty") - ErrInvalidLensConfig = errors.New("invalid lens configuration") - ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) - ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") + ErrNoDocOrFile = errors.New("document or file must be defined") + ErrInvalidDocument = errors.New("invalid document") + ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") + ErrInvalidExportFormat = errors.New("invalid export format") + ErrNoLensConfig = errors.New("lens config cannot be empty") + ErrInvalidLensConfig = 
errors.New("invalid lens configuration") + ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) + ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") + ErrPolicyFileArgCanNotBeEmpty = errors.New("policy file argument can not be empty") ) +func NewErrRequiredFlagEmpty(longName string, shortName string) error { + return errors.New(fmt.Sprintf(errRequiredFlag, longName, shortName, "empty")) +} + +func NewErrRequiredFlagInvalid(longName string, shortName string) error { + return errors.New(fmt.Sprintf(errRequiredFlag, longName, shortName, "invalid")) +} + func NewErrInvalidLensConfig(inner error) error { return errors.Wrap(errInvalidLensConfig, inner) } diff --git a/cli/request.go b/cli/request.go index d5e37e79a3..c583d51a28 100644 --- a/cli/request.go +++ b/cli/request.go @@ -16,6 +16,7 @@ import ( "github.com/spf13/cobra" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/errors" ) @@ -25,9 +26,13 @@ const ( ) func MakeRequestCommand() *cobra.Command { + const identityFlagLongRequired string = "identity" + const identityFlagShortRequired string = "i" + + var identityValue string var filePath string var cmd = &cobra.Command{ - Use: "query [query request]", + Use: "query [-i --identity] [request]", Short: "Send a DefraDB GraphQL query request", Long: `Send a DefraDB GraphQL query request to the database. @@ -37,6 +42,9 @@ A query request can be sent as a single argument. Example command: Do a query request from a file by using the '-f' flag. Example command: defradb client query -f request.graphql +Do a query request from a file and with an identity. Example command: + defradb client query -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f request.graphql + Or it can be sent via stdin by using the '-' special syntax. Example command: cat request.graphql | defradb client query - @@ -45,7 +53,8 @@ with the database more conveniently. To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetContextStore(cmd) + // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. + identity := acpIdentity.NewIdentity(identityValue) var request string switch { @@ -68,7 +77,9 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so if request == "" { return errors.New("request cannot be empty") } - result := store.ExecRequest(cmd.Context(), request) + + store := mustGetContextStore(cmd) + result := store.ExecRequest(cmd.Context(), identity, request) var errors []string for _, err := range result.GQL.Errors { @@ -87,5 +98,12 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so } cmd.Flags().StringVarP(&filePath, "file", "f", "", "File containing the query request") + cmd.Flags().StringVarP( + &identityValue, + identityFlagLongRequired, + identityFlagShortRequired, + "", + "Identity of the actor", + ) return cmd } diff --git a/cli/schema_add.go b/cli/schema_add.go index f987d062df..e81896322d 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -25,6 +25,11 @@ func MakeSchemaAddCommand() *cobra.Command { Short: "Add new schema", Long: `Add new schema. +Schema Object with a '@policy(id:".." resource: "..")' linked will only be accepted if: + - ACP is available (i.e. ACP is not disabled). + - The specified resource adheres to the Document Access Control DPI Rules. 
+  - Learn more about [ACP & DPI Rules](/acp/README.md)
+
 Example: add from an argument string:
   defradb client schema add 'type Foo { ... }'
 
diff --git a/cli/start.go b/cli/start.go
index 90ca08d77a..ca9267e7e9 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -50,6 +50,10 @@ func MakeStartCommand() *cobra.Command {
 			dbOpts := []db.Option{
 				db.WithUpdateEvents(),
 				db.WithMaxRetries(cfg.GetInt("datastore.MaxTxnRetries")),
+				// TODO-ACP: In the future, when we add support for the --no-acp flag once admin signatures are in,
+				// we can allow starting the db without acp. Currently that can only be done programmatically.
+				// https://github.com/sourcenetwork/defradb/issues/2271
+				db.WithACPInMemory(),
 			}
 
 			netOpts := []net.NodeOpt{
@@ -84,12 +88,17 @@
 				// Running with memory store mode will always generate a random key.
 				// Adding support for an ephemeral mode and moving the key to the
 				// config would solve both of these issues.
-				rootdir := mustGetContextRootDir(cmd)
-				key, err := loadOrGeneratePrivateKey(filepath.Join(rootdir, "data", "key"))
+				rootDir := mustGetContextRootDir(cmd)
+				key, err := loadOrGeneratePrivateKey(filepath.Join(rootDir, "data", "key"))
 				if err != nil {
 					return err
 				}
 				netOpts = append(netOpts, net.WithPrivateKey(key))
+
+				// TODO-ACP: In the future, when we add support for the --no-acp flag once admin signatures are in,
+				// we can allow starting the db without acp. Currently that can only be done programmatically.
+				// https://github.com/sourcenetwork/defradb/issues/2271
+				dbOpts = append(dbOpts, db.WithACP(rootDir))
 			}
 
 			opts := []node.NodeOpt{
diff --git a/cli/tx_create.go b/cli/tx_create.go
index da239b6943..5190ba20f7 100644
--- a/cli/tx_create.go
+++ b/cli/tx_create.go
@@ -13,7 +13,6 @@ package cli
 import (
 	"github.com/spf13/cobra"
 
-	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/datastore"
 )
 
@@ -25,7 +24,7 @@ func MakeTxCreateCommand() *cobra.Command {
 		Short: "Create a new DefraDB transaction.",
 		Long: `Create a new DefraDB transaction.`,
 		RunE: func(cmd *cobra.Command, args []string) (err error) {
-			db := cmd.Context().Value(dbContextKey).(client.DB)
+			db := mustGetContextDB(cmd)
 
 			var tx datastore.Txn
 			if concurrent {
diff --git a/cli/utils.go b/cli/utils.go
index caeb282606..f923021fcf 100644
--- a/cli/utils.go
+++ b/cli/utils.go
@@ -50,6 +50,13 @@ var (
 	colContextKey = contextKey("col")
 )
 
+// mustGetContextDB returns the db for the current command context.
+//
+// If a db is not set in the current context this function panics.
+func mustGetContextDB(cmd *cobra.Command) client.DB {
+	return cmd.Context().Value(dbContextKey).(client.DB)
+}
+
 // mustGetContextStore returns the store for the current command context.
 //
 // If a store is not set in the current context this function panics.
diff --git a/client/collection.go b/client/collection.go
index 58b53c3af0..aa219b3a74 100644
--- a/client/collection.go
+++ b/client/collection.go
@@ -46,12 +46,12 @@ type Collection interface {
 	// Create a new document.
 	//
 	// Will verify the DocID/CID to ensure that the new document is correctly formatted.
-	Create(context.Context, *Document) error
+	Create(ctx context.Context, identity immutable.Option[string], doc *Document) error
 
 	// CreateMany new documents.
 	//
 	// Will verify the DocIDs/CIDs to ensure that the new documents are correctly formatted.
-	CreateMany(context.Context, []*Document) error
+	CreateMany(ctx context.Context, identity immutable.Option[string], docs []*Document) error
 
 	// Update an existing document with the new values.
// @@ -59,25 +59,26 @@ type Collection interface { // Any field that is nil/empty that hasn't called Clear will be ignored. // // Will return a ErrDocumentNotFound error if the given document is not found. - Update(context.Context, *Document) error + Update(ctx context.Context, identity immutable.Option[string], docs *Document) error // Save the given document in the database. // // If a document exists with the given DocID it will update it. Otherwise a new document // will be created. - Save(context.Context, *Document) error + Save(ctx context.Context, identity immutable.Option[string], doc *Document) error // Delete will attempt to delete a document by DocID. // // Will return true if a deletion is successful, and return false along with an error // if it cannot. If the document doesn't exist, then it will return false and a ErrDocumentNotFound error. - // This operation will hard-delete all state relating to the given DocID. This includes data, block, and head storage. - Delete(context.Context, DocID) (bool, error) + // This operation will hard-delete all state relating to the given DocID. + // This includes data, block, and head storage. + Delete(ctx context.Context, identity immutable.Option[string], docID DocID) (bool, error) // Exists checks if a given document exists with supplied DocID. // // Will return true if a matching document exists, otherwise will return false. - Exists(context.Context, DocID) (bool, error) + Exists(ctx context.Context, identity immutable.Option[string], docID DocID) (bool, error) // UpdateWith updates a target document using the given updater type. // @@ -88,13 +89,23 @@ type Collection interface { // // Returns an ErrInvalidUpdateTarget error if the target type is not supported. // Returns an ErrInvalidUpdater error if the updater type is not supported. - UpdateWith(ctx context.Context, target any, updater string) (*UpdateResult, error) + UpdateWith( + ctx context.Context, + identity immutable.Option[string], + target any, + updater string, + ) (*UpdateResult, error) // UpdateWithFilter updates using a filter to target documents for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. - UpdateWithFilter(ctx context.Context, filter any, updater string) (*UpdateResult, error) + UpdateWithFilter( + ctx context.Context, + identity immutable.Option[string], + filter any, + updater string, + ) (*UpdateResult, error) // UpdateWithDocID updates using a DocID to target a single document for update. // @@ -102,7 +113,12 @@ type Collection interface { // else an ErrInvalidUpdater will be returned. // // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. - UpdateWithDocID(ctx context.Context, docID DocID, updater string) (*UpdateResult, error) + UpdateWithDocID( + ctx context.Context, + identity immutable.Option[string], + docID DocID, + updater string, + ) (*UpdateResult, error) // UpdateWithDocIDs updates documents matching the given DocIDs. // @@ -110,7 +126,12 @@ type Collection interface { // else an ErrInvalidUpdater will be returned. // // Returns an ErrDocumentNotFound if a document is not found for any given DocID. - UpdateWithDocIDs(context.Context, []DocID, string) (*UpdateResult, error) + UpdateWithDocIDs( + ctx context.Context, + identity immutable.Option[string], + docIDs []DocID, + updater string, + ) (*UpdateResult, error) // DeleteWith deletes a target document. 
 	//
@@ -121,13 +142,21 @@ type Collection interface {
 	// with a status of `Deleted`.
 	//
 	// Returns an ErrInvalidDeleteTarget if the target type is not supported.
-	DeleteWith(ctx context.Context, target any) (*DeleteResult, error)
+	DeleteWith(
+		ctx context.Context,
+		identity immutable.Option[string],
+		target any,
+	) (*DeleteResult, error)
 
 	// DeleteWithFilter deletes documents matching the given filter.
 	//
 	// This operation will soft-delete documents related to the given filter and update the composite block
 	// with a status of `Deleted`.
-	DeleteWithFilter(ctx context.Context, filter any) (*DeleteResult, error)
+	DeleteWithFilter(
+		ctx context.Context,
+		identity immutable.Option[string],
+		filter any,
+	) (*DeleteResult, error)
 
 	// DeleteWithDocID deletes using a DocID to target a single document for delete.
 	//
@@ -135,7 +164,11 @@ type Collection interface {
 	// with a status of `Deleted`.
 	//
 	// Returns an ErrDocumentNotFound if a document matching the given DocID is not found.
-	DeleteWithDocID(context.Context, DocID) (*DeleteResult, error)
+	DeleteWithDocID(
+		ctx context.Context,
+		identity immutable.Option[string],
+		docID DocID,
+	) (*DeleteResult, error)
 
 	// DeleteWithDocIDs deletes documents matching the given DocIDs.
 	//
@@ -143,25 +176,35 @@ type Collection interface {
 	// with a status of `Deleted`.
 	//
 	// Returns an ErrDocumentNotFound if a document is not found for any given DocID.
-	DeleteWithDocIDs(context.Context, []DocID) (*DeleteResult, error)
+	DeleteWithDocIDs(
+		ctx context.Context,
+		identity immutable.Option[string],
+		docIDs []DocID,
+	) (*DeleteResult, error)
 
 	// Get returns the document with the given DocID.
 	//
 	// Returns an ErrDocumentNotFound if a document matching the given DocID is not found.
-	Get(ctx context.Context, docID DocID, showDeleted bool) (*Document, error)
+	Get(
+		ctx context.Context,
+		identity immutable.Option[string],
+		docID DocID,
+		showDeleted bool,
+	) (*Document, error)
 
 	// WithTxn returns a new instance of the collection, with a transaction
 	// handle instead of a raw DB handle.
 	WithTxn(datastore.Txn) Collection
 
 	// GetAllDocIDs returns all the document IDs that exist in the collection.
-	GetAllDocIDs(ctx context.Context) (<-chan DocIDResult, error)
+	GetAllDocIDs(ctx context.Context, identity immutable.Option[string]) (<-chan DocIDResult, error)
 
 	// CreateIndex creates a new index on the collection.
 	// `IndexDescription` contains the description of the index to be created.
 	// `IndexDescription.Name` must start with a letter or an underscore and can
 	// only contain letters, numbers, and underscores.
 	// If the name of the index is not provided, it will be generated.
+	// WARNING: This method cannot create an index for a collection that has a policy.
 	CreateIndex(context.Context, IndexDescription) (IndexDescription, error)
 
 	// DropIndex drops an index from the collection.
diff --git a/client/collection_description.go b/client/collection_description.go
index 2e3e10aa36..2db34ddb8b 100644
--- a/client/collection_description.go
+++ b/client/collection_description.go
@@ -65,6 +65,16 @@ type CollectionDescription struct {
 
 	// Indexes contains the secondary indexes that this Collection has.
 	Indexes []IndexDescription
+
+	// Policy contains the policy information on this collection.
+	//
+	// It is possible for a collection to not have a policy; a collection
+	// without a policy has no access control.
+	//
+	// Note: The policy information must be validated using acp right after
+	// parsing is done, to avoid storing an invalid policyID or policy resource
+	// that may not even exist on acp.
+	Policy immutable.Option[PolicyDescription]
 }
 
 // QuerySource represents a collection data source from a query.
@@ -166,6 +176,7 @@ type collectionDescription struct {
 	ID              uint32
 	RootID          uint32
 	SchemaVersionID string
+	Policy          immutable.Option[PolicyDescription]
 	Indexes         []IndexDescription
 	Fields          []CollectionFieldDescription
 
@@ -187,6 +198,7 @@ func (c *CollectionDescription) UnmarshalJSON(bytes []byte) error {
 	c.Indexes = descMap.Indexes
 	c.Fields = descMap.Fields
 	c.Sources = make([]any, len(descMap.Sources))
+	c.Policy = descMap.Policy
 
 	for i, source := range descMap.Sources {
 		sourceJson, err := json.Marshal(source)
diff --git a/client/db.go b/client/db.go
index 660c03998f..a5d855f137 100644
--- a/client/db.go
+++ b/client/db.go
@@ -85,6 +85,18 @@ type DB interface {
 	//
 	// It is likely unwise to call this on a large database instance.
 	PrintDump(ctx context.Context) error
+
+	// AddPolicy adds a policy to acp, if acp is available.
+	//
+	// If the policy is successfully added to acp then a policyID is returned;
+	// otherwise, if acp is not available, the following error is returned:
+	// [client.ErrPolicyAddFailureNoACP]
+	//
+	// Detects the format of the policy automatically by assuming YAML format if JSON
+	// validation fails.
+	//
+	// Note: A policy cannot be added without the creatorID (identity).
+	AddPolicy(ctx context.Context, creatorID string, policy string) (AddPolicyResult, error)
 }
 
 // Store contains the core DefraDB read-write operations.
@@ -226,8 +238,12 @@ type Store interface {
 	// GetAllIndexes returns all the indexes that currently exist within this [Store].
 	GetAllIndexes(context.Context) (map[CollectionName][]IndexDescription, error)
 
-	// ExecRequest executes the given GQL request against the [Store].
-	ExecRequest(context.Context, string) *RequestResult
+	// ExecRequest executes the given GQL request against the [Store], with the given identity.
+	ExecRequest(
+		ctx context.Context,
+		identity immutable.Option[string],
+		request string,
+	) *RequestResult
 }
 
 // GQLResult represents the immediate results of a GQL request.
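A minimal usage sketch of the AddPolicy method documented above (illustration only, not part of this patch): the creator identity value is a placeholder, and the PolicyID field name on AddPolicyResult is an assumption.

    package example

    import (
    	"context"
    	"fmt"

    	"github.com/sourcenetwork/defradb/client"
    )

    // addPolicy submits a policy document (YAML or JSON) on behalf of a creator.
    // creator would be an actor address such as
    // "cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j" (placeholder value).
    func addPolicy(ctx context.Context, db client.DB, creator, policy string) error {
    	// Format detection is automatic: YAML is assumed if JSON validation fails.
    	res, err := db.AddPolicy(ctx, creator, policy)
    	if err != nil {
    		// e.g. client.ErrPolicyAddFailureNoACP when acp is not available.
    		return err
    	}
    	fmt.Println(res.PolicyID) // PolicyID field name is an assumption
    	return nil
    }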
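Similarly, a sketch of how the new identity parameter threads through the Collection and Store methods above, assuming (as the CLI commands in this patch rely on) that acpIdentity.NewIdentity wraps a non-empty actor string in immutable.Some and that immutable.None[string]() stands for an anonymous actor:

    package example

    import (
    	"context"

    	"github.com/sourcenetwork/immutable"

    	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
    	"github.com/sourcenetwork/defradb/client"
    )

    // getDoc fetches a document anonymously, or as the given actor when one is provided.
    func getDoc(
    	ctx context.Context,
    	col client.Collection,
    	docID client.DocID,
    	actor string, // "" for anonymous; otherwise a placeholder actor address
    ) (*client.Document, error) {
    	identity := immutable.None[string]() // anonymous: only public documents are accessible
    	if actor != "" {
    		identity = acpIdentity.NewIdentity(actor) // assumed to return immutable.Some(actor)
    	}
    	return col.Get(ctx, identity, docID, false)
    }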
diff --git a/client/errors.go b/client/errors.go index 7e18e9566c..dbc29ed78b 100644 --- a/client/errors.go +++ b/client/errors.go @@ -48,7 +48,8 @@ var ( ErrOperationNotPermittedOnNamelessCols = errors.New(errOperationNotPermittedOnNamelessCols) ErrFieldNotObject = errors.New("trying to access field on a non object type") ErrValueTypeMismatch = errors.New("value does not match indicated type") - ErrDocumentNotFound = errors.New("no document for the given ID exists") + ErrDocumentNotFoundOrNotAuthorized = errors.New("document not found or not authorized to access") + ErrPolicyAddFailureNoACP = errors.New("failure adding policy because ACP was not available") ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") diff --git a/client/mocks/collection.go b/client/mocks/collection.go index 6e6c7afae3..397bac7d1b 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -27,13 +27,13 @@ func (_m *Collection) EXPECT() *Collection_Expecter { return &Collection_Expecter{mock: &_m.Mock} } -// Create provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Create(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Create provides a mock function with given fields: ctx, identity, doc +func (_m *Collection) Create(ctx context.Context, identity immutable.Option[string], doc *client.Document) error { + ret := _m.Called(ctx, identity, doc) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { + r0 = rf(ctx, identity, doc) } else { r0 = ret.Error(0) } @@ -47,25 +47,69 @@ type Collection_Create_Call struct { } // Create is a helper method to define mock.On call +// - ctx context.Context +// - identity immutable.Option[string] +// - doc *client.Document +func (_e *Collection_Expecter) Create(ctx interface{}, identity interface{}, doc interface{}) *Collection_Create_Call { + return &Collection_Create_Call{Call: _e.mock.On("Create", ctx, identity, doc)} +} + +func (_c *Collection_Create_Call) Run(run func(ctx context.Context, identity immutable.Option[string], doc *client.Document)) *Collection_Create_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) + }) + return _c +} + +func (_c *Collection_Create_Call) Return(_a0 error) *Collection_Create_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Create_Call { + _c.Call.Return(run) + return _c +} + +// CreateDocIndex provides a mock function with given fields: _a0, _a1 +func (_m *Collection) CreateDocIndex(_a0 context.Context, _a1 *client.Document) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Collection_CreateDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateDocIndex' +type Collection_CreateDocIndex_Call struct { + *mock.Call +} + +// CreateDocIndex is a helper method to define mock.On call // 
- _a0 context.Context // - _a1 *client.Document -func (_e *Collection_Expecter) Create(_a0 interface{}, _a1 interface{}) *Collection_Create_Call { - return &Collection_Create_Call{Call: _e.mock.On("Create", _a0, _a1)} +func (_e *Collection_Expecter) CreateDocIndex(_a0 interface{}, _a1 interface{}) *Collection_CreateDocIndex_Call { + return &Collection_CreateDocIndex_Call{Call: _e.mock.On("CreateDocIndex", _a0, _a1)} } -func (_c *Collection_Create_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Create_Call { +func (_c *Collection_CreateDocIndex_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_CreateDocIndex_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(*client.Document)) }) return _c } -func (_c *Collection_Create_Call) Return(_a0 error) *Collection_Create_Call { +func (_c *Collection_CreateDocIndex_Call) Return(_a0 error) *Collection_CreateDocIndex_Call { _c.Call.Return(_a0) return _c } -func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Create_Call { +func (_c *Collection_CreateDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_CreateDocIndex_Call { _c.Call.Return(run) return _c } @@ -123,13 +167,13 @@ func (_c *Collection_CreateIndex_Call) RunAndReturn(run func(context.Context, cl return _c } -// CreateMany provides a mock function with given fields: _a0, _a1 -func (_m *Collection) CreateMany(_a0 context.Context, _a1 []*client.Document) error { - ret := _m.Called(_a0, _a1) +// CreateMany provides a mock function with given fields: ctx, identity, docs +func (_m *Collection) CreateMany(ctx context.Context, identity immutable.Option[string], docs []*client.Document) error { + ret := _m.Called(ctx, identity, docs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, []*client.Document) error); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []*client.Document) error); ok { + r0 = rf(ctx, identity, docs) } else { r0 = ret.Error(0) } @@ -143,15 +187,16 @@ type Collection_CreateMany_Call struct { } // CreateMany is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []*client.Document -func (_e *Collection_Expecter) CreateMany(_a0 interface{}, _a1 interface{}) *Collection_CreateMany_Call { - return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docs []*client.Document +func (_e *Collection_Expecter) CreateMany(ctx interface{}, identity interface{}, docs interface{}) *Collection_CreateMany_Call { + return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", ctx, identity, docs)} } -func (_c *Collection_CreateMany_Call) Run(run func(_a0 context.Context, _a1 []*client.Document)) *Collection_CreateMany_Call { +func (_c *Collection_CreateMany_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docs []*client.Document)) *Collection_CreateMany_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]*client.Document)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]*client.Document)) }) return _c } @@ -161,7 +206,7 @@ func (_c *Collection_CreateMany_Call) Return(_a0 error) *Collection_CreateMany_C return _c } -func (_c *Collection_CreateMany_Call) RunAndReturn(run func(context.Context, []*client.Document) error) *Collection_CreateMany_Call { 
+func (_c *Collection_CreateMany_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []*client.Document) error) *Collection_CreateMany_Call { _c.Call.Return(run) return _c } @@ -207,23 +252,23 @@ func (_c *Collection_Definition_Call) RunAndReturn(run func() client.CollectionD return _c } -// Delete provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocID) (bool, error) { - ret := _m.Called(_a0, _a1) +// Delete provides a mock function with given fields: ctx, identity, docID +func (_m *Collection) Delete(ctx context.Context, identity immutable.Option[string], docID client.DocID) (bool, error) { + ret := _m.Called(ctx, identity, docID) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (bool, error)); ok { + return rf(ctx, identity, docID) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) bool); ok { + r0 = rf(ctx, identity, docID) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { + r1 = rf(ctx, identity, docID) } else { r1 = ret.Error(1) } @@ -237,15 +282,16 @@ type Collection_Delete_Call struct { } // Delete is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) Delete(_a0 interface{}, _a1 interface{}) *Collection_Delete_Call { - return &Collection_Delete_Call{Call: _e.mock.On("Delete", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docID client.DocID +func (_e *Collection_Expecter) Delete(ctx interface{}, identity interface{}, docID interface{}) *Collection_Delete_Call { + return &Collection_Delete_Call{Call: _e.mock.On("Delete", ctx, identity, docID)} } -func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_Delete_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) }) return _c } @@ -255,30 +301,73 @@ func (_c *Collection_Delete_Call) Return(_a0 bool, _a1 error) *Collection_Delete return _c } -func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (bool, error)) *Collection_Delete_Call { _c.Call.Return(run) return _c } -// DeleteWith provides a mock function with given fields: ctx, target -func (_m *Collection) DeleteWith(ctx context.Context, target interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, target) +// DeleteDocIndex provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteDocIndex(_a0 context.Context, _a1 *client.Document) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + 
r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Collection_DeleteDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteDocIndex' +type Collection_DeleteDocIndex_Call struct { + *mock.Call +} + +// DeleteDocIndex is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 *client.Document +func (_e *Collection_Expecter) DeleteDocIndex(_a0 interface{}, _a1 interface{}) *Collection_DeleteDocIndex_Call { + return &Collection_DeleteDocIndex_Call{Call: _e.mock.On("DeleteDocIndex", _a0, _a1)} +} + +func (_c *Collection_DeleteDocIndex_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_DeleteDocIndex_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.Document)) + }) + return _c +} + +func (_c *Collection_DeleteDocIndex_Call) Return(_a0 error) *Collection_DeleteDocIndex_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Collection_DeleteDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_DeleteDocIndex_Call { + _c.Call.Return(run) + return _c +} + +// DeleteWith provides a mock function with given fields: ctx, identity, target +func (_m *Collection) DeleteWith(ctx context.Context, identity immutable.Option[string], target interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, identity, target) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, target) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, identity, target) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, target) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, identity, target) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, target) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}) error); ok { + r1 = rf(ctx, identity, target) } else { r1 = ret.Error(1) } @@ -293,14 +382,15 @@ type Collection_DeleteWith_Call struct { // DeleteWith is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - target interface{} -func (_e *Collection_Expecter) DeleteWith(ctx interface{}, target interface{}) *Collection_DeleteWith_Call { - return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, target)} +func (_e *Collection_Expecter) DeleteWith(ctx interface{}, identity interface{}, target interface{}) *Collection_DeleteWith_Call { + return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, identity, target)} } -func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, target interface{})) *Collection_DeleteWith_Call { +func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, identity immutable.Option[string], target interface{})) *Collection_DeleteWith_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{})) }) return _c } @@ -310,30 +400,30 @@ func (_c *Collection_DeleteWith_Call) Return(_a0 
*client.DeleteResult, _a1 error return _c } -func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { +func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { _c.Call.Return(run) return _c } -// DeleteWithDocID provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithDocID(_a0 context.Context, _a1 client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(_a0, _a1) +// DeleteWithDocID provides a mock function with given fields: ctx, identity, docID +func (_m *Collection) DeleteWithDocID(ctx context.Context, identity immutable.Option[string], docID client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(ctx, identity, docID) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (*client.DeleteResult, error)); ok { + return rf(ctx, identity, docID) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) *client.DeleteResult); ok { + r0 = rf(ctx, identity, docID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { + r1 = rf(ctx, identity, docID) } else { r1 = ret.Error(1) } @@ -347,15 +437,16 @@ type Collection_DeleteWithDocID_Call struct { } // DeleteWithDocID is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) DeleteWithDocID(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocID_Call { - return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docID client.DocID +func (_e *Collection_Expecter) DeleteWithDocID(ctx interface{}, identity interface{}, docID interface{}) *Collection_DeleteWithDocID_Call { + return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", ctx, identity, docID)} } -func (_c *Collection_DeleteWithDocID_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_DeleteWithDocID_Call { +func (_c *Collection_DeleteWithDocID_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_DeleteWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) }) return _c } @@ -365,30 +456,30 @@ func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { +func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { _c.Call.Return(run) return _c } -// 
DeleteWithDocIDs provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithDocIDs(_a0 context.Context, _a1 []client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(_a0, _a1) +// DeleteWithDocIDs provides a mock function with given fields: ctx, identity, docIDs +func (_m *Collection) DeleteWithDocIDs(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(ctx, identity, docIDs) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID) (*client.DeleteResult, error)); ok { + return rf(ctx, identity, docIDs) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID) *client.DeleteResult); ok { + r0 = rf(ctx, identity, docIDs) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], []client.DocID) error); ok { + r1 = rf(ctx, identity, docIDs) } else { r1 = ret.Error(1) } @@ -402,15 +493,16 @@ type Collection_DeleteWithDocIDs_Call struct { } // DeleteWithDocIDs is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocID -func (_e *Collection_Expecter) DeleteWithDocIDs(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocIDs_Call { - return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docIDs []client.DocID +func (_e *Collection_Expecter) DeleteWithDocIDs(ctx interface{}, identity interface{}, docIDs interface{}) *Collection_DeleteWithDocIDs_Call { + return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", ctx, identity, docIDs)} } -func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID)) *Collection_DeleteWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]client.DocID)) }) return _c } @@ -420,30 +512,30 @@ func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(run) return _c } -// DeleteWithFilter provides a mock function with given fields: ctx, filter -func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, filter) +// DeleteWithFilter provides a mock function with given fields: ctx, identity, filter +func (_m *Collection) DeleteWithFilter(ctx 
context.Context, identity immutable.Option[string], filter interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, identity, filter) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, identity, filter) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, identity, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, filter) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}) error); ok { + r1 = rf(ctx, identity, filter) } else { r1 = ret.Error(1) } @@ -458,14 +550,15 @@ type Collection_DeleteWithFilter_Call struct { // DeleteWithFilter is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - filter interface{} -func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { - return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} +func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, identity interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { + return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, identity, filter)} } -func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, identity immutable.Option[string], filter interface{})) *Collection_DeleteWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{})) }) return _c } @@ -475,7 +568,7 @@ func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { _c.Call.Return(run) return _c } @@ -564,23 +657,23 @@ func (_c *Collection_DropIndex_Call) RunAndReturn(run func(context.Context, stri return _c } -// Exists provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocID) (bool, error) { - ret := _m.Called(_a0, _a1) +// Exists provides a mock function with given fields: ctx, identity, docID +func (_m *Collection) Exists(ctx context.Context, identity immutable.Option[string], docID client.DocID) (bool, error) { + ret := _m.Called(ctx, identity, docID) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (bool, error)); ok { + return 
rf(ctx, identity, docID) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) bool); ok { + r0 = rf(ctx, identity, docID) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { + r1 = rf(ctx, identity, docID) } else { r1 = ret.Error(1) } @@ -594,15 +687,16 @@ type Collection_Exists_Call struct { } // Exists is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 client.DocID -func (_e *Collection_Expecter) Exists(_a0 interface{}, _a1 interface{}) *Collection_Exists_Call { - return &Collection_Exists_Call{Call: _e.mock.On("Exists", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docID client.DocID +func (_e *Collection_Expecter) Exists(ctx interface{}, identity interface{}, docID interface{}) *Collection_Exists_Call { + return &Collection_Exists_Call{Call: _e.mock.On("Exists", ctx, identity, docID)} } -func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_Exists_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) }) return _c } @@ -612,30 +706,30 @@ func (_c *Collection_Exists_Call) Return(_a0 bool, _a1 error) *Collection_Exists return _c } -func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (bool, error)) *Collection_Exists_Call { _c.Call.Return(run) return _c } -// Get provides a mock function with given fields: ctx, docID, showDeleted -func (_m *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { - ret := _m.Called(ctx, docID, showDeleted) +// Get provides a mock function with given fields: ctx, identity, docID, showDeleted +func (_m *Collection) Get(ctx context.Context, identity immutable.Option[string], docID client.DocID, showDeleted bool) (*client.Document, error) { + ret := _m.Called(ctx, identity, docID, showDeleted) var r0 *client.Document var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) (*client.Document, error)); ok { - return rf(ctx, docID, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, bool) (*client.Document, error)); ok { + return rf(ctx, identity, docID, showDeleted) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) *client.Document); ok { - r0 = rf(ctx, docID, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, bool) *client.Document); ok { + r0 = rf(ctx, identity, docID, showDeleted) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.Document) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocID, bool) error); ok { - r1 = rf(ctx, docID, showDeleted) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID, bool) error); ok { + r1 = 
rf(ctx, identity, docID, showDeleted) } else { r1 = ret.Error(1) } @@ -650,15 +744,16 @@ type Collection_Get_Call struct { // Get is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - docID client.DocID // - showDeleted bool -func (_e *Collection_Expecter) Get(ctx interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { - return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, docID, showDeleted)} +func (_e *Collection_Expecter) Get(ctx interface{}, identity interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { + return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, identity, docID, showDeleted)} } -func (_c *Collection_Get_Call) Run(run func(ctx context.Context, docID client.DocID, showDeleted bool)) *Collection_Get_Call { +func (_c *Collection_Get_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID, showDeleted bool)) *Collection_Get_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID), args[2].(bool)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID), args[3].(bool)) }) return _c } @@ -668,30 +763,30 @@ func (_c *Collection_Get_Call) Return(_a0 *client.Document, _a1 error) *Collecti return _c } -func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { +func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { _c.Call.Return(run) return _c } -// GetAllDocIDs provides a mock function with given fields: ctx -func (_m *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { - ret := _m.Called(ctx) +// GetAllDocIDs provides a mock function with given fields: ctx, identity +func (_m *Collection) GetAllDocIDs(ctx context.Context, identity immutable.Option[string]) (<-chan client.DocIDResult, error) { + ret := _m.Called(ctx, identity) var r0 <-chan client.DocIDResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocIDResult, error)); ok { - return rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string]) (<-chan client.DocIDResult, error)); ok { + return rf(ctx, identity) } - if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocIDResult); ok { - r0 = rf(ctx) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string]) <-chan client.DocIDResult); ok { + r0 = rf(ctx, identity) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(<-chan client.DocIDResult) } } - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string]) error); ok { + r1 = rf(ctx, identity) } else { r1 = ret.Error(1) } @@ -706,13 +801,14 @@ type Collection_GetAllDocIDs_Call struct { // GetAllDocIDs is a helper method to define mock.On call // - ctx context.Context -func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}) *Collection_GetAllDocIDs_Call { - return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx)} +// - identity immutable.Option[string] +func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}, identity interface{}) *Collection_GetAllDocIDs_Call { + return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx, identity)} } -func (_c 
*Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocIDs_Call { +func (_c *Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string])) *Collection_GetAllDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context)) + run(args[0].(context.Context), args[1].(immutable.Option[string])) }) return _c } @@ -722,7 +818,7 @@ func (_c *Collection_GetAllDocIDs_Call) Return(_a0 <-chan client.DocIDResult, _a return _c } -func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { +func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string]) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { _c.Call.Return(run) return _c } @@ -863,13 +959,13 @@ func (_c *Collection_Name_Call) RunAndReturn(run func() immutable.Option[string] return _c } -// Save provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Save(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Save provides a mock function with given fields: ctx, identity, doc +func (_m *Collection) Save(ctx context.Context, identity immutable.Option[string], doc *client.Document) error { + ret := _m.Called(ctx, identity, doc) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { + r0 = rf(ctx, identity, doc) } else { r0 = ret.Error(0) } @@ -883,15 +979,16 @@ type Collection_Save_Call struct { } // Save is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 *client.Document -func (_e *Collection_Expecter) Save(_a0 interface{}, _a1 interface{}) *Collection_Save_Call { - return &Collection_Save_Call{Call: _e.mock.On("Save", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - doc *client.Document +func (_e *Collection_Expecter) Save(ctx interface{}, identity interface{}, doc interface{}) *Collection_Save_Call { + return &Collection_Save_Call{Call: _e.mock.On("Save", ctx, identity, doc)} } -func (_c *Collection_Save_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Save_Call { +func (_c *Collection_Save_Call) Run(run func(ctx context.Context, identity immutable.Option[string], doc *client.Document)) *Collection_Save_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*client.Document)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) }) return _c } @@ -901,7 +998,7 @@ func (_c *Collection_Save_Call) Return(_a0 error) *Collection_Save_Call { return _c } -func (_c *Collection_Save_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Save_Call { +func (_c *Collection_Save_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Save_Call { _c.Call.Return(run) return _c } @@ -988,13 +1085,13 @@ func (_c *Collection_SchemaRoot_Call) RunAndReturn(run func() string) *Collectio return _c } -// Update provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Update(_a0 context.Context, _a1 *client.Document) error { - ret := _m.Called(_a0, _a1) +// Update provides a mock function with given fields: ctx, identity, docs +func (_m *Collection) Update(ctx 
context.Context, identity immutable.Option[string], docs *client.Document) error { + ret := _m.Called(ctx, identity, docs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { + r0 = rf(ctx, identity, docs) } else { r0 = ret.Error(0) } @@ -1008,15 +1105,16 @@ type Collection_Update_Call struct { } // Update is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 *client.Document -func (_e *Collection_Expecter) Update(_a0 interface{}, _a1 interface{}) *Collection_Update_Call { - return &Collection_Update_Call{Call: _e.mock.On("Update", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docs *client.Document +func (_e *Collection_Expecter) Update(ctx interface{}, identity interface{}, docs interface{}) *Collection_Update_Call { + return &Collection_Update_Call{Call: _e.mock.On("Update", ctx, identity, docs)} } -func (_c *Collection_Update_Call) Run(run func(_a0 context.Context, _a1 *client.Document)) *Collection_Update_Call { +func (_c *Collection_Update_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docs *client.Document)) *Collection_Update_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(*client.Document)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) }) return _c } @@ -1026,30 +1124,74 @@ func (_c *Collection_Update_Call) Return(_a0 error) *Collection_Update_Call { return _c } -func (_c *Collection_Update_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Update_Call { +func (_c *Collection_Update_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Update_Call { _c.Call.Return(run) return _c } -// UpdateWith provides a mock function with given fields: ctx, target, updater -func (_m *Collection) UpdateWith(ctx context.Context, target interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, target, updater) +// UpdateDocIndex provides a mock function with given fields: ctx, oldDoc, newDoc +func (_m *Collection) UpdateDocIndex(ctx context.Context, oldDoc *client.Document, newDoc *client.Document) error { + ret := _m.Called(ctx, oldDoc, newDoc) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *client.Document, *client.Document) error); ok { + r0 = rf(ctx, oldDoc, newDoc) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Collection_UpdateDocIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateDocIndex' +type Collection_UpdateDocIndex_Call struct { + *mock.Call +} + +// UpdateDocIndex is a helper method to define mock.On call +// - ctx context.Context +// - oldDoc *client.Document +// - newDoc *client.Document +func (_e *Collection_Expecter) UpdateDocIndex(ctx interface{}, oldDoc interface{}, newDoc interface{}) *Collection_UpdateDocIndex_Call { + return &Collection_UpdateDocIndex_Call{Call: _e.mock.On("UpdateDocIndex", ctx, oldDoc, newDoc)} +} + +func (_c *Collection_UpdateDocIndex_Call) Run(run func(ctx context.Context, oldDoc *client.Document, newDoc *client.Document)) *Collection_UpdateDocIndex_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(*client.Document), args[2].(*client.Document)) + }) + return _c +} + +func (_c 
*Collection_UpdateDocIndex_Call) Return(_a0 error) *Collection_UpdateDocIndex_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Collection_UpdateDocIndex_Call) RunAndReturn(run func(context.Context, *client.Document, *client.Document) error) *Collection_UpdateDocIndex_Call { + _c.Call.Return(run) + return _c +} + +// UpdateWith provides a mock function with given fields: ctx, identity, target, updater +func (_m *Collection) UpdateWith(ctx context.Context, identity immutable.Option[string], target interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, identity, target, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, target, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, identity, target, updater) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, target, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) *client.UpdateResult); ok { + r0 = rf(ctx, identity, target, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, target, updater) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}, string) error); ok { + r1 = rf(ctx, identity, target, updater) } else { r1 = ret.Error(1) } @@ -1064,15 +1206,16 @@ type Collection_UpdateWith_Call struct { // UpdateWith is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - target interface{} // - updater string -func (_e *Collection_Expecter) UpdateWith(ctx interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { - return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, target, updater)} +func (_e *Collection_Expecter) UpdateWith(ctx interface{}, identity interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { + return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, identity, target, updater)} } -func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, target interface{}, updater string)) *Collection_UpdateWith_Call { +func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, identity immutable.Option[string], target interface{}, updater string)) *Collection_UpdateWith_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{}), args[3].(string)) }) return _c } @@ -1082,30 +1225,30 @@ func (_c *Collection_UpdateWith_Call) Return(_a0 *client.UpdateResult, _a1 error return _c } -func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { +func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { _c.Call.Return(run) return _c } -// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater -func (_m *Collection) UpdateWithDocID(ctx context.Context, docID 
client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, docID, updater) +// UpdateWithDocID provides a mock function with given fields: ctx, identity, docID, updater +func (_m *Collection) UpdateWithDocID(ctx context.Context, identity immutable.Option[string], docID client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, identity, docID, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, docID, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, identity, docID, updater) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, docID, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, identity, docID, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { - r1 = rf(ctx, docID, updater) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID, string) error); ok { + r1 = rf(ctx, identity, docID, updater) } else { r1 = ret.Error(1) } @@ -1120,15 +1263,16 @@ type Collection_UpdateWithDocID_Call struct { // UpdateWithDocID is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - docID client.DocID // - updater string -func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { - return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} +func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, identity interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { + return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, identity, docID, updater)} } -func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { +func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID), args[3].(string)) }) return _c } @@ -1138,30 +1282,30 @@ func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { +func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { _c.Call.Return(run) return _c } -// UpdateWithDocIDs provides a mock function with given fields: _a0, _a1, _a2 -func (_m *Collection) UpdateWithDocIDs(_a0 context.Context, _a1 []client.DocID, _a2 string) (*client.UpdateResult, error) { - ret := _m.Called(_a0, _a1, _a2) +// UpdateWithDocIDs provides a 
mock function with given fields: ctx, identity, docIDs, updater +func (_m *Collection) UpdateWithDocIDs(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, identity, docIDs, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, identity, docIDs, updater) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { - r0 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, identity, docIDs, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID, string) error); ok { - r1 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], []client.DocID, string) error); ok { + r1 = rf(ctx, identity, docIDs, updater) } else { r1 = ret.Error(1) } @@ -1175,16 +1319,17 @@ type Collection_UpdateWithDocIDs_Call struct { } // UpdateWithDocIDs is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocID -// - _a2 string -func (_e *Collection_Expecter) UpdateWithDocIDs(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithDocIDs_Call { - return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", _a0, _a1, _a2)} +// - ctx context.Context +// - identity immutable.Option[string] +// - docIDs []client.DocID +// - updater string +func (_e *Collection_Expecter) UpdateWithDocIDs(ctx interface{}, identity interface{}, docIDs interface{}, updater interface{}) *Collection_UpdateWithDocIDs_Call { + return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", ctx, identity, docIDs, updater)} } -func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID, _a2 string)) *Collection_UpdateWithDocIDs_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID, updater string)) *Collection_UpdateWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]client.DocID), args[3].(string)) }) return _c } @@ -1194,30 +1339,30 @@ func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { +func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(run) return _c } -// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater -func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, filter, updater) +// UpdateWithFilter provides a mock function with 
given fields: ctx, identity, filter, updater +func (_m *Collection) UpdateWithFilter(ctx context.Context, identity immutable.Option[string], filter interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, identity, filter, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, identity, filter, updater) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) *client.UpdateResult); ok { + r0 = rf(ctx, identity, filter, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, filter, updater) + if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}, string) error); ok { + r1 = rf(ctx, identity, filter, updater) } else { r1 = ret.Error(1) } @@ -1232,15 +1377,16 @@ type Collection_UpdateWithFilter_Call struct { // UpdateWithFilter is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - filter interface{} // - updater string -func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { - return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} +func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, identity interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { + return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, identity, filter, updater)} } -func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, identity immutable.Option[string], filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{}), args[3].(string)) }) return _c } @@ -1250,7 +1396,7 @@ func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { _c.Call.Return(run) return _c } diff --git a/client/mocks/db.go b/client/mocks/db.go index c6f6711a59..c31578e190 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -32,6 +32,60 @@ func (_m *DB) EXPECT() *DB_Expecter { return &DB_Expecter{mock: &_m.Mock} } +// AddPolicy provides a mock function with given fields: ctx, creatorID, policy +func (_m *DB) AddPolicy(ctx context.Context, creatorID string, policy string) 
(client.AddPolicyResult, error) { + ret := _m.Called(ctx, creatorID, policy) + + var r0 client.AddPolicyResult + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (client.AddPolicyResult, error)); ok { + return rf(ctx, creatorID, policy) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) client.AddPolicyResult); ok { + r0 = rf(ctx, creatorID, policy) + } else { + r0 = ret.Get(0).(client.AddPolicyResult) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, creatorID, policy) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DB_AddPolicy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddPolicy' +type DB_AddPolicy_Call struct { + *mock.Call +} + +// AddPolicy is a helper method to define mock.On call +// - ctx context.Context +// - creatorID string +// - policy string +func (_e *DB_Expecter) AddPolicy(ctx interface{}, creatorID interface{}, policy interface{}) *DB_AddPolicy_Call { + return &DB_AddPolicy_Call{Call: _e.mock.On("AddPolicy", ctx, creatorID, policy)} +} + +func (_c *DB_AddPolicy_Call) Run(run func(ctx context.Context, creatorID string, policy string)) *DB_AddPolicy_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *DB_AddPolicy_Call) Return(_a0 client.AddPolicyResult, _a1 error) *DB_AddPolicy_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *DB_AddPolicy_Call) RunAndReturn(run func(context.Context, string, string) (client.AddPolicyResult, error)) *DB_AddPolicy_Call { + _c.Call.Return(run) + return _c +} + // AddSchema provides a mock function with given fields: _a0, _a1 func (_m *DB) AddSchema(_a0 context.Context, _a1 string) ([]client.CollectionDescription, error) { ret := _m.Called(_a0, _a1) @@ -346,13 +400,13 @@ func (_c *DB_Events_Call) RunAndReturn(run func() events.Events) *DB_Events_Call return _c } -// ExecRequest provides a mock function with given fields: _a0, _a1 -func (_m *DB) ExecRequest(_a0 context.Context, _a1 string) *client.RequestResult { - ret := _m.Called(_a0, _a1) +// ExecRequest provides a mock function with given fields: ctx, identity, request +func (_m *DB) ExecRequest(ctx context.Context, identity immutable.Option[string], request string) *client.RequestResult { + ret := _m.Called(ctx, identity, request) var r0 *client.RequestResult - if rf, ok := ret.Get(0).(func(context.Context, string) *client.RequestResult); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], string) *client.RequestResult); ok { + r0 = rf(ctx, identity, request) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.RequestResult) @@ -368,15 +422,16 @@ type DB_ExecRequest_Call struct { } // ExecRequest is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 string -func (_e *DB_Expecter) ExecRequest(_a0 interface{}, _a1 interface{}) *DB_ExecRequest_Call { - return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", _a0, _a1)} +// - ctx context.Context +// - identity immutable.Option[string] +// - request string +func (_e *DB_Expecter) ExecRequest(ctx interface{}, identity interface{}, request interface{}) *DB_ExecRequest_Call { + return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", ctx, identity, request)} } -func (_c *DB_ExecRequest_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) Run(run 
func(ctx context.Context, identity immutable.Option[string], request string)) *DB_ExecRequest_Call {
 	_c.Call.Run(func(args mock.Arguments) {
-		run(args[0].(context.Context), args[1].(string))
+		run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(string))
 	})
 	return _c
 }
@@ -386,7 +441,7 @@ func (_c *DB_ExecRequest_Call) Return(_a0 *client.RequestResult) *DB_ExecRequest
 	return _c
 }
 
-func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, string) *client.RequestResult) *DB_ExecRequest_Call {
+func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, immutable.Option[string], string) *client.RequestResult) *DB_ExecRequest_Call {
 	_c.Call.Return(run)
 	return _c
 }
diff --git a/client/policy.go b/client/policy.go
new file mode 100644
index 0000000000..5b877696c2
--- /dev/null
+++ b/client/policy.go
@@ -0,0 +1,31 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package client
+
+// PolicyDescription describes a policy. It is made up of a valid policyID that
+// is registered with acp and a valid DPI-compliant resource name that also
+// exists on that policy; the description is already validated.
+type PolicyDescription struct {
+	// ID is the local policyID when using local acp, and global policyID when
+	// using remote acp with sourcehub. This identifier is externally managed
+	// by the acp system.
+	ID string
+
+	// ResourceName is the name of the corresponding resource within the policy.
+	ResourceName string
+}
+
+// AddPolicyResult wraps the result of successfully adding/registering a Policy.
+type AddPolicyResult struct {
+	// PolicyID is the unique identifier returned by the acp system,
+	// upon successful creation of a policy.
+	PolicyID string
+}
diff --git a/core/key.go b/core/key.go
index 5c569f310d..69b19efb6e 100644
--- a/core/key.go
+++ b/core/key.go
@@ -43,7 +43,8 @@ const (
 )
 
 const (
-	COLLECTION                = "/collection/id"
+	COLLECTION                = "collection"
+	COLLECTION_ID             = "/collection/id"
 	COLLECTION_NAME           = "/collection/name"
 	COLLECTION_SCHEMA_VERSION = "/collection/version"
 	COLLECTION_INDEX          = "/collection/index"
@@ -326,7 +327,7 @@ func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) Col
 // Where [IndexName] might be omitted. Anything else will return an error.
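+//
+// An illustrative sketch of the key shapes this accepts (values assumed,
+// derived from the parsing rules below):
+//
+//	"/collection/index/15/userName" -> collection id 15, index name "userName"
+//	"/collection/index/15"          -> collection id 15, index name omitted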
func NewCollectionIndexKeyFromString(key string) (CollectionIndexKey, error) {
 	keyArr := strings.Split(key, "/")
-	if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != "collection" || keyArr[2] != "index" {
+	if len(keyArr) < 4 || len(keyArr) > 5 || keyArr[1] != COLLECTION || keyArr[2] != "index" {
 		return CollectionIndexKey{}, ErrInvalidKey
 	}
 
@@ -564,7 +565,7 @@ func (k PrimaryDataStoreKey) ToString() string {
 }
 
 func (k CollectionKey) ToString() string {
-	return fmt.Sprintf("%s/%s", COLLECTION, strconv.Itoa(int(k.CollectionID)))
+	return fmt.Sprintf("%s/%s", COLLECTION_ID, strconv.Itoa(int(k.CollectionID)))
 }
 
 func (k CollectionKey) Bytes() []byte {
diff --git a/core/parser.go b/core/parser.go
index 05a90d0526..619f3fd1c2 100644
--- a/core/parser.go
+++ b/core/parser.go
@@ -51,6 +51,10 @@ type Parser interface {
 	NewFilterFromString(collectionType string, body string) (immutable.Option[request.Filter], error)
 
 	// ParseSDL parses an SDL string into a set of collection descriptions.
+	//
+	// The parsing should validate the syntax, but not whether what that syntax
+	// expresses is valid, i.e. we don't want the parser to make remote calls to
+	// verify that the policy description is valid (that is the caller's responsibility).
 	ParseSDL(ctx context.Context, schemaString string) ([]client.CollectionDefinition, error)
 
 	// Adds the given schema to this parser's model.
diff --git a/db/backup.go b/db/backup.go
index 81d62f78b9..2d3b824be1 100644
--- a/db/backup.go
+++ b/db/backup.go
@@ -17,6 +17,7 @@ import (
 	"fmt"
 	"os"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/datastore"
@@ -90,7 +91,8 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin
 			return NewErrDocFromMap(err)
 		}
 
-		err = col.WithTxn(txn).Create(ctx, doc)
+		// TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup
+		err = col.WithTxn(txn).Create(ctx, acpIdentity.NoIdentity, doc)
 		if err != nil {
 			return NewErrDocCreate(err)
 		}
@@ -101,7 +103,8 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin
 			if err != nil {
 				return NewErrDocUpdate(err)
 			}
-			err = col.WithTxn(txn).Update(ctx, doc)
+			// TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup
+			err = col.WithTxn(txn).Update(ctx, acpIdentity.NoIdentity, doc)
 			if err != nil {
 				return NewErrDocUpdate(err)
 			}
@@ -189,7 +192,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client
 			return err
 		}
 		colTxn := col.WithTxn(txn)
-		docIDsCh, err := colTxn.GetAllDocIDs(ctx)
+		// TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export
+		docIDsCh, err := colTxn.GetAllDocIDs(ctx, acpIdentity.NoIdentity)
 		if err != nil {
 			return err
 		}
@@ -205,7 +209,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client
 					return err
 				}
 			}
-			doc, err := colTxn.Get(ctx, docResultWithID.ID, false)
+			// TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export
+			doc, err := colTxn.Get(ctx, acpIdentity.NoIdentity, docResultWithID.ID, false)
 			if err != nil {
 				return err
 			}
@@ -237,7 +242,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client
 					if err != nil {
 						return err
 					}
-					foreignDoc, err := foreignCol.Get(ctx, foreignDocID, false)
+					// TODO-ACP: 
https://github.com/sourcenetwork/defradb/issues/2430 + foreignDoc, err := foreignCol.Get(ctx, acpIdentity.NoIdentity, foreignDocID, false) if err != nil { err := doc.Set(field.Name+request.RelatedObjectID, nil) if err != nil { diff --git a/db/backup_test.go b/db/backup_test.go index 093b1a1a3f..6a9eab3cc9 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -18,6 +18,7 @@ import ( "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -46,10 +47,10 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, doc1) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) require.NoError(t, err) - err = col1.Create(ctx, doc2) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -58,7 +59,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, doc3) + err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -108,10 +109,10 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, doc1) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) require.NoError(t, err) - err = col1.Create(ctx, doc2) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -120,7 +121,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, doc3) + err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -170,10 +171,10 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, doc1) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) require.NoError(t, err) - err = col1.Create(ctx, doc2) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -182,7 +183,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, doc3) + err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -233,10 +234,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, doc1) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) require.NoError(t, err) - err = col1.Create(ctx, doc2) + err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Book") @@ -248,15 +249,15 @@ func 
TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) {
 	doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema())
 	require.NoError(t, err)
 
-	err = col2.Create(ctx, doc3)
+	err = col2.Create(ctx, acpIdentity.NoIdentity, doc3)
 	require.NoError(t, err)
-	err = col2.Create(ctx, doc4)
+	err = col2.Create(ctx, acpIdentity.NoIdentity, doc4)
 	require.NoError(t, err)
 
 	err = doc1.Set("age", 31)
 	require.NoError(t, err)
 
-	err = col1.Update(ctx, doc1)
+	err = col1.Update(ctx, acpIdentity.NoIdentity, doc1)
 	require.NoError(t, err)
 
 	txn, err := db.NewTxn(ctx, true)
@@ -306,10 +307,10 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) {
 	doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema())
 	require.NoError(t, err)
 
-	err = col1.Create(ctx, doc1)
+	err = col1.Create(ctx, acpIdentity.NoIdentity, doc1)
 	require.NoError(t, err)
 
-	err = col1.Create(ctx, doc2)
+	err = col1.Create(ctx, acpIdentity.NoIdentity, doc2)
 	require.NoError(t, err)
 
 	col2, err := db.GetCollectionByName(ctx, "Address")
@@ -318,7 +319,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) {
 	doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema())
 	require.NoError(t, err)
 
-	err = col2.Create(ctx, doc3)
+	err = col2.Create(ctx, acpIdentity.NoIdentity, doc3)
 	require.NoError(t, err)
 
 	txn, err := db.NewTxn(ctx, true)
@@ -392,7 +393,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) {
 	key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f")
 	require.NoError(t, err)
-	_, err = col1.Get(ctx, key1, false)
+	_, err = col1.Get(ctx, acpIdentity.NoIdentity, key1, false)
 	require.NoError(t, err)
 
 	col2, err := db.getCollectionByName(ctx, txn, "User")
@@ -400,12 +401,12 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) {
 	key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df")
 	require.NoError(t, err)
-	_, err = col2.Get(ctx, key2, false)
+	_, err = col2.Get(ctx, acpIdentity.NoIdentity, key2, false)
 	require.NoError(t, err)
 
 	key3, err := client.NewDocIDFromString("bae-e933420a-988a-56f8-8952-6c245aebd519")
 	require.NoError(t, err)
-	_, err = col2.Get(ctx, key3, false)
+	_, err = col2.Get(ctx, acpIdentity.NoIdentity, key3, false)
 	require.NoError(t, err)
 }
diff --git a/db/collection.go b/db/collection.go
index 2ad2cf2ca5..d7364df3b2 100644
--- a/db/collection.go
+++ b/db/collection.go
@@ -27,6 +27,7 @@ import (
 	"github.com/lens-vm/lens/host-go/config/model"
 	"github.com/sourcenetwork/immutable"
 
+	"github.com/sourcenetwork/defradb/acp"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/core"
@@ -161,6 +162,7 @@ func (db *db) createCollection(
 	}
 
 	col := db.newCollection(desc, schema)
+
 	for _, index := range desc.Indexes {
 		if _, err := col.createIndex(ctx, txn, index); err != nil {
 			return nil, err
@@ -170,6 +172,37 @@ func (db *db) createCollection(
 	return db.getCollectionByID(ctx, txn, desc.ID)
 }
 
+// validateCollectionDefinitionPolicyDesc validates that the policy definition is valid, beyond syntax.
+//
+// Ensures that the information within the policy definition makes sense;
+// this function might also make relevant remote calls using the acp system.
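+//
+// A hypothetical definition that would pass this check, assuming acp is
+// enabled and the policy and resource actually exist in the acp system
+// (the ID below is a placeholder, not a real policyID):
+//
+//	policyDesc := immutable.Some(client.PolicyDescription{
+//		ID:           "examplePolicyID", // placeholder policyID known to acp
+//		ResourceName: "users",           // resource assumed to exist on that policy
+//	})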
+func (db *db) validateCollectionDefinitionPolicyDesc(
+	ctx context.Context,
+	policyDesc immutable.Option[client.PolicyDescription],
+) error {
+	if !policyDesc.HasValue() {
+		// No policy validation needed, whether acp exists or not doesn't matter.
+		return nil
+	}
+
+	// If there is a policy specified but the database does not have
+	// acp enabled/available, return an error; the database must have acp
+	// available to enable access control (in order to adhere to the specified policy).
+	if !db.acp.HasValue() {
+		return ErrCanNotHavePolicyWithoutACP
+	}
+
+	// If we have the policy specified on the collection, and acp is available/enabled,
+	// then using the acp system we need to ensure the policy id specified
+	// actually exists as a policy, and the resource name exists on that policy
+	// and that the resource is a valid DPI.
+	return db.acp.Value().ValidateResourceExistsOnValidDPI(
+		ctx,
+		policyDesc.Value().ID,
+		policyDesc.Value().ResourceName,
+	)
+}
+
 // updateSchema updates the persisted schema description matching the name of the given
 // description, to the values in the given description.
 //
@@ -627,6 +660,7 @@ var patchCollectionValidators = []func(
 	validateSourcesNotRedefined,
 	validateIndexesNotModified,
 	validateFieldsNotModified,
+	validatePolicyNotModified,
 	validateIDNotZero,
 	validateIDUnique,
 	validateIDExists,
@@ -768,6 +802,25 @@ func validateFieldsNotModified(
 	return nil
 }
 
+func validatePolicyNotModified(
+	oldColsByID map[uint32]client.CollectionDescription,
+	newColsByID map[uint32]client.CollectionDescription,
+) error {
+	for _, newCol := range newColsByID {
+		oldCol, ok := oldColsByID[newCol.ID]
+		if !ok {
+			continue
+		}
+
+		// DeepEqual is temporary, as this validation is temporary
+		if !reflect.DeepEqual(oldCol.Policy, newCol.Policy) {
+			return NewErrCollectionPolicyCannotBeMutated(newCol.ID)
+		}
+	}
+
+	return nil
+}
+
 func validateIDNotZero(
 	oldColsByID map[uint32]client.CollectionDescription,
 	newColsByID map[uint32]client.CollectionDescription,
@@ -1025,6 +1078,7 @@ func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint3
 	}
 
 	collection := db.newCollection(col, schema)
+
 	err = collection.loadIndexes(ctx, txn)
 	if err != nil {
 		return nil, err
@@ -1182,17 +1236,21 @@ func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([
 //
 // @todo: We probably need a lock on the collection for this kind of op since
 // it hits every key and will cause Tx conflicts for concurrent Txs
-func (c *collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) {
+func (c *collection) GetAllDocIDs(
+	ctx context.Context,
+	identity immutable.Option[string],
+) (<-chan client.DocIDResult, error) {
 	txn, err := c.getTxn(ctx, true)
 	if err != nil {
 		return nil, err
 	}
 
-	return c.getAllDocIDsChan(ctx, txn)
+	return c.getAllDocIDsChan(ctx, identity, txn)
 }
 
 func (c *collection) getAllDocIDsChan(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 ) (<-chan client.DocIDResult, error) {
 	prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix
@@ -1235,12 +1293,29 @@ func (c *collection) getAllDocIDsChan(
 			docID, err := client.NewDocIDFromString(rawDocID)
 			if err != nil {
 				resCh <- client.DocIDResult{
-					Err: res.Error,
+					Err: err,
+				}
+				return
+			}
+
+			canRead, err := c.checkAccessOfDocWithACP(
+				ctx,
+				identity,
+				acp.ReadPermission,
+				docID.String(),
+			)
+
+			if err != nil {
+				resCh <- client.DocIDResult{
+					Err: err,
 				}
 				return
 			}
-			resCh <- client.DocIDResult{
-				ID: docID,
+
+			if canRead {
+				resCh 
<- client.DocIDResult{ + ID: docID, + } } } }() @@ -1290,23 +1365,32 @@ func (c *collection) WithTxn(txn datastore.Txn) client.Collection { // Create a new document. // Will verify the DocID/CID to ensure that the new document is correctly formatted. -func (c *collection) Create(ctx context.Context, doc *client.Document) error { +func (c *collection) Create( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { txn, err := c.getTxn(ctx, false) if err != nil { return err } defer c.discardImplicitTxn(ctx, txn) - err = c.create(ctx, txn, doc) + err = c.create(ctx, identity, txn, doc) if err != nil { return err } + return c.commitImplicitTxn(ctx, txn) } // CreateMany creates a collection of documents at once. // Will verify the DocID/CID to ensure that the new documents are correctly formatted. -func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *collection) CreateMany( + ctx context.Context, + identity immutable.Option[string], + docs []*client.Document, +) error { txn, err := c.getTxn(ctx, false) if err != nil { return err @@ -1314,7 +1398,7 @@ func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) er defer c.discardImplicitTxn(ctx, txn) for _, doc := range docs { - err = c.create(ctx, txn, doc) + err = c.create(ctx, identity, txn, doc) if err != nil { return err } @@ -1338,14 +1422,19 @@ func (c *collection) getDocIDAndPrimaryKeyFromDoc( return docID, primaryKey, nil } -func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.Document) error { +func (c *collection) create( + ctx context.Context, + identity immutable.Option[string], + txn datastore.Txn, + doc *client.Document, +) error { docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) if err != nil { return err } // check if doc already exists - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) if err != nil { return err } @@ -1366,18 +1455,27 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client. } // write data to DB via MerkleClock/CRDT - _, err = c.save(ctx, txn, doc, true) + _, err = c.save(ctx, identity, txn, doc, true) if err != nil { return err } - return c.indexNewDoc(ctx, txn, doc) + err = c.indexNewDoc(ctx, txn, doc) + if err != nil { + return err + } + + return c.registerDocWithACP(ctx, identity, doc.ID().String()) } // Update an existing document with the new values. // Any field that needs to be removed or cleared should call doc.Clear(field) before. // Any field that is nil/empty that hasn't called Clear will be ignored. 
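+//
+// A minimal sketch of the new call shape (hypothetical caller; the identity
+// string is a placeholder):
+//
+//	err := col.Update(ctx, acpIdentity.NoIdentity, doc)            // no identity
+//	err  = col.Update(ctx, immutable.Some("exampleIdentity"), doc) // permissioned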
-func (c *collection) Update(ctx context.Context, doc *client.Document) error { +func (c *collection) Update( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { txn, err := c.getTxn(ctx, false) if err != nil { return err @@ -1385,18 +1483,18 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { defer c.discardImplicitTxn(ctx, txn) primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) if err != nil { return err } if !exists { - return client.ErrDocumentNotFound + return client.ErrDocumentNotFoundOrNotAuthorized } if isDeleted { return NewErrDocumentDeleted(primaryKey.DocID) } - err = c.update(ctx, txn, doc) + err = c.update(ctx, identity, txn, doc) if err != nil { return err } @@ -1409,8 +1507,27 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { // or, just update everything regardless. // Should probably be smart about the update due to the MerkleCRDT overhead, shouldn't // add to the bloat. -func (c *collection) update(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - _, err := c.save(ctx, txn, doc, false) +func (c *collection) update( + ctx context.Context, + identity immutable.Option[string], + txn datastore.Txn, + doc *client.Document, +) error { + // Stop the update if the correct permissions aren't there. + canUpdate, err := c.checkAccessOfDocWithACP( + ctx, + identity, + acp.WritePermission, + doc.ID().String(), + ) + if err != nil { + return err + } + if !canUpdate { + return client.ErrDocumentNotFoundOrNotAuthorized + } + + _, err = c.save(ctx, identity, txn, doc, false) if err != nil { return err } @@ -1419,7 +1536,11 @@ func (c *collection) update(ctx context.Context, txn datastore.Txn, doc *client. // Save a document into the db. // Either by creating a new document or by updating an existing one -func (c *collection) Save(ctx context.Context, doc *client.Document) error { +func (c *collection) Save( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { txn, err := c.getTxn(ctx, false) if err != nil { return err @@ -1428,7 +1549,7 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { // Check if document already exists with primary DS key. primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) if err != nil { return err } @@ -1438,9 +1559,9 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { } if exists { - err = c.update(ctx, txn, doc) + err = c.update(ctx, identity, txn, doc) } else { - err = c.create(ctx, txn, doc) + err = c.create(ctx, identity, txn, doc) } if err != nil { return err @@ -1449,8 +1570,12 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { return c.commitImplicitTxn(ctx, txn) } +// save saves the document state. save MUST not be called outside the `c.create` +// and `c.update` methods as we wrap the acp logic within those methods. Calling +// save elsewhere could cause the omission of acp checks. 
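+//
+// As introduced by this patch, the public Create/Update/Save entry points
+// funnel into c.create and c.update, which perform the acp work (document
+// registration on create, a write-permission check on update) before save runs.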
 func (c *collection) save(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	doc *client.Document,
 	isCreate bool,
@@ -1503,7 +1628,15 @@ func (c *collection) save(
 			if isSecondaryRelationID {
 				primaryId := val.Value().(string)
 
-				err = c.patchPrimaryDoc(ctx, txn, c.Name().Value(), relationFieldDescription, primaryKey.DocID, primaryId)
+				err = c.patchPrimaryDoc(
+					ctx,
+					identity,
+					txn,
+					c.Name().Value(),
+					relationFieldDescription,
+					primaryKey.DocID,
+					primaryId,
+				)
 				if err != nil {
 					return cid.Undef, err
 				}
@@ -1513,7 +1646,14 @@ func (c *collection) save(
 				continue
 			}
 
-			err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.ID().String(), fieldDescription, val.Value())
+			err = c.validateOneToOneLinkDoesntAlreadyExist(
+				ctx,
+				identity,
+				txn,
+				doc.ID().String(),
+				fieldDescription,
+				val.Value(),
+			)
 			if err != nil {
 				return cid.Undef, err
 			}
@@ -1579,6 +1719,7 @@ func (c *collection) save(
 
 func (c *collection) validateOneToOneLinkDoesntAlreadyExist(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	docID string,
 	fieldDescription client.FieldDefinition,
@@ -1625,7 +1766,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist(
 		fieldDescription.Name,
 		value,
 	)
-	selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter)
+	selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter)
 	if err != nil {
 		return err
 	}
@@ -1677,7 +1818,11 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist(
 // otherwise will return false, along with an error, if it cannot.
 // If the document doesn't exist, then it will return false, and an ErrDocumentNotFound error.
 // This operation will delete all state relating to the given DocID. This includes data, block, and head storage.
-func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, error) {
+func (c *collection) Delete(
+	ctx context.Context,
+	identity immutable.Option[string],
+	docID client.DocID,
+) (bool, error) {
 	txn, err := c.getTxn(ctx, false)
 	if err != nil {
 		return false, err
 	}
@@ -1685,18 +1830,8 @@ func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, erro
 	defer c.discardImplicitTxn(ctx, txn)
 
 	primaryKey := c.getPrimaryKeyFromDocID(docID)
-	exists, isDeleted, err := c.exists(ctx, txn, primaryKey)
-	if err != nil {
-		return false, err
-	}
-	if !exists || isDeleted {
-		return false, client.ErrDocumentNotFound
-	}
-	if isDeleted {
-		return false, NewErrDocumentDeleted(primaryKey.DocID)
-	}
 
-	err = c.applyDelete(ctx, txn, primaryKey)
+	err = c.applyDelete(ctx, identity, txn, primaryKey)
 	if err != nil {
 		return false, err
 	}
@@ -1704,7 +1839,11 @@ func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, erro
 }
 
 // Exists checks if a given document exists with supplied DocID.
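+//
+// A minimal sketch of the new signature in use (illustrative only):
+//
+//	exists, err := col.Exists(ctx, acpIdentity.NoIdentity, docID)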
-func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { +func (c *collection) Exists( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (bool, error) { txn, err := c.getTxn(ctx, false) if err != nil { return false, err @@ -1712,7 +1851,7 @@ func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, erro defer c.discardImplicitTxn(ctx, txn) primaryKey := c.getPrimaryKeyFromDocID(docID) - exists, isDeleted, err := c.exists(ctx, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err } @@ -1722,9 +1861,22 @@ func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, erro // check if a document exists with the given primary key func (c *collection) exists( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { + canRead, err := c.checkAccessOfDocWithACP( + ctx, + identity, + acp.ReadPermission, + primaryKey.DocID, + ) + if err != nil { + return false, false, err + } else if !canRead { + return false, false, nil + } + val, err := txn.Datastore().Get(ctx, primaryKey.ToDS()) if err != nil && errors.Is(err, ds.ErrNotFound) { return false, false, nil @@ -1738,6 +1890,10 @@ func (c *collection) exists( return true, false, nil } +// saveCompositeToMerkleCRDT saves the composite to the merkle CRDT. +// saveCompositeToMerkleCRDT MUST not be called outside the `c.save` +// and `c.applyDelete` methods as we wrap the acp logic around those methods. +// Calling it elsewhere could cause the omission of acp checks. func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, txn datastore.Txn, diff --git a/db/collection_acp.go b/db/collection_acp.go new file mode 100644 index 0000000000..ccb4f3ae32 --- /dev/null +++ b/db/collection_acp.go @@ -0,0 +1,71 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" + "github.com/sourcenetwork/defradb/db/permission" +) + +// registerDocWithACP handles the registration of the document with acp. +// The registering is done at document creation on the collection. +// +// According to our access logic we have these components to worry about: +// (1) the request is permissioned (has an identity signature), +// (2) the collection is permissioned (has a policy), +// (3) acp is available (acp is enabled). +// +// The document is only registered if all (1) (2) and (3) are true. +// +// Otherwise, nothing is registered with the acp system. +func (c *collection) registerDocWithACP( + ctx context.Context, + identity immutable.Option[string], + docID string, +) error { + // If acp is not available, then no document is registered. 
+ if !c.db.acp.HasValue() { + return nil + } + + return permission.RegisterDocOnCollectionWithACP( + ctx, + identity, + c.db.acp.Value(), + c, + docID, + ) +} + +func (c *collection) checkAccessOfDocWithACP( + ctx context.Context, + identity immutable.Option[string], + dpiPermission acp.DPIPermission, + docID string, +) (bool, error) { + // If acp is not available, then we have unrestricted access. + if !c.db.acp.HasValue() { + return true, nil + } + + return permission.CheckAccessOfDocOnCollectionWithACP( + ctx, + identity, + c.db.acp.Value(), + c, + dpiPermission, + docID, + ) +} diff --git a/db/collection_delete.go b/db/collection_delete.go index 371c454532..984cd27a21 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -13,6 +13,9 @@ package db import ( "context" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" @@ -30,15 +33,16 @@ import ( // Eg: DeleteWithFilter or DeleteWithDocID func (c *collection) DeleteWith( ctx context.Context, + identity immutable.Option[string], target any, ) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) + return c.DeleteWithFilter(ctx, identity, t) case client.DocID: - return c.DeleteWithDocID(ctx, t) + return c.DeleteWithDocID(ctx, identity, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) + return c.DeleteWithDocIDs(ctx, identity, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -47,6 +51,7 @@ func (c *collection) DeleteWith( // DeleteWithDocID deletes using a DocID to target a single document for delete. func (c *collection) DeleteWithDocID( ctx context.Context, + identity immutable.Option[string], docID client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) @@ -57,7 +62,7 @@ func (c *collection) DeleteWithDocID( defer c.discardImplicitTxn(ctx, txn) dsKey := c.getPrimaryKeyFromDocID(docID) - res, err := c.deleteWithKey(ctx, txn, dsKey) + res, err := c.deleteWithKey(ctx, identity, txn, dsKey) if err != nil { return nil, err } @@ -68,6 +73,7 @@ func (c *collection) DeleteWithDocID( // DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. func (c *collection) DeleteWithDocIDs( ctx context.Context, + identity immutable.Option[string], docIDs []client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) @@ -77,7 +83,7 @@ func (c *collection) DeleteWithDocIDs( defer c.discardImplicitTxn(ctx, txn) - res, err := c.deleteWithIDs(ctx, txn, docIDs, client.Deleted) + res, err := c.deleteWithIDs(ctx, identity, txn, docIDs, client.Deleted) if err != nil { return nil, err } @@ -88,6 +94,7 @@ func (c *collection) DeleteWithDocIDs( // DeleteWithFilter deletes using a filter to target documents for delete. 
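+//
+// A minimal sketch with a hypothetical filter (the filter shape assumes the
+// client's _eq operator convention):
+//
+//	res, err := col.DeleteWithFilter(ctx, identity, map[string]any{
+//		"name": map[string]any{"_eq": "Bob"},
+//	})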
 func (c *collection) DeleteWithFilter(
 	ctx context.Context,
+	identity immutable.Option[string],
 	filter any,
 ) (*client.DeleteResult, error) {
 	txn, err := c.getTxn(ctx, false)
@@ -97,7 +104,7 @@ func (c *collection) DeleteWithFilter(
 
 	defer c.discardImplicitTxn(ctx, txn)
 
-	res, err := c.deleteWithFilter(ctx, txn, filter, client.Deleted)
+	res, err := c.deleteWithFilter(ctx, identity, txn, filter, client.Deleted)
 	if err != nil {
 		return nil, err
 	}
@@ -107,12 +114,13 @@ func (c *collection) DeleteWithFilter(
 
 func (c *collection) deleteWithKey(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	key core.PrimaryDataStoreKey,
 ) (*client.DeleteResult, error) {
 	// Check the key we have been given to delete with actually has a corresponding
 	// document (i.e. document actually exists in the collection).
-	err := c.applyDelete(ctx, txn, key)
+	err := c.applyDelete(ctx, identity, txn, key)
 	if err != nil {
 		return nil, err
 	}
@@ -128,6 +136,7 @@ func (c *collection) deleteWithKey(
 
 func (c *collection) deleteWithIDs(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	docIDs []client.DocID,
 	_ client.DocumentStatus,
@@ -140,7 +149,7 @@ func (c *collection) deleteWithIDs(
 		primaryKey := c.getPrimaryKeyFromDocID(docID)
 
 		// Apply the function that will perform the full deletion of this document.
-		err := c.applyDelete(ctx, txn, primaryKey)
+		err := c.applyDelete(ctx, identity, txn, primaryKey)
 		if err != nil {
 			return nil, err
 		}
@@ -157,12 +166,13 @@ func (c *collection) deleteWithIDs(
 
 func (c *collection) deleteWithFilter(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	filter any,
 	_ client.DocumentStatus,
 ) (*client.DeleteResult, error) {
 	// Make a selection plan that will scan through only the documents with matching filter.
-	selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter)
+	selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter)
 	if err != nil {
 		return nil, err
 	}
@@ -210,7 +220,7 @@ func (c *collection) deleteWithFilter(
 		}
 
 		// Delete the document that is associated with this DS key we got from the filter.
-		err = c.applyDelete(ctx, txn, primaryKey)
+		err = c.applyDelete(ctx, identity, txn, primaryKey)
 		if err != nil {
 			return nil, err
 		}
@@ -226,20 +236,37 @@ func (c *collection) deleteWithFilter(
 
 func (c *collection) applyDelete(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	primaryKey core.PrimaryDataStoreKey,
 ) error {
-	found, isDeleted, err := c.exists(ctx, txn, primaryKey)
+	// Must also have read permission to delete, in order to check if the document exists.
+	found, isDeleted, err := c.exists(ctx, identity, txn, primaryKey)
 	if err != nil {
 		return err
 	}
 	if !found {
-		return client.ErrDocumentNotFound
+		return client.ErrDocumentNotFoundOrNotAuthorized
 	}
 	if isDeleted {
 		return NewErrDocumentDeleted(primaryKey.DocID)
 	}
 
+	// Stop deletion of document if the correct permissions aren't there.
+	canDelete, err := c.checkAccessOfDocWithACP(
+		ctx,
+		identity,
+		acp.WritePermission,
+		primaryKey.DocID,
+	)
+
+	if err != nil {
+		return err
+	}
+
+	if !canDelete {
+		return client.ErrDocumentNotFoundOrNotAuthorized
+	}
+
 	dsKey := primaryKey.ToDataStoreKey()
 
 	headset := clock.NewHeadSet(
diff --git a/db/collection_get.go b/db/collection_get.go
index cf245fc678..16d5bd4711 100644
--- a/db/collection_get.go
+++ b/db/collection_get.go
@@ -13,6 +13,8 @@ package db
 import (
 	"context"
 
+	"github.com/sourcenetwork/immutable"
+
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/core"
 	"github.com/sourcenetwork/defradb/datastore"
@@ -20,7 +22,12 @@ import (
 	"github.com/sourcenetwork/defradb/db/fetcher"
 )
 
-func (c *collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) {
+func (c *collection) Get(
+	ctx context.Context,
+	identity immutable.Option[string],
+	docID client.DocID,
+	showDeleted bool,
+) (*client.Document, error) {
 	// create txn
 	txn, err := c.getTxn(ctx, true)
 	if err != nil {
@@ -29,23 +36,29 @@ func (c *collection) Get(ctx context.Context, docID client.DocID, showDeleted bo
 	defer c.discardImplicitTxn(ctx, txn)
 
 	primaryKey := c.getPrimaryKeyFromDocID(docID)
-	found, isDeleted, err := c.exists(ctx, txn, primaryKey)
+	found, isDeleted, err := c.exists(ctx, identity, txn, primaryKey)
 	if err != nil {
 		return nil, err
 	}
 	if !found || (isDeleted && !showDeleted) {
-		return nil, client.ErrDocumentNotFound
+		return nil, client.ErrDocumentNotFoundOrNotAuthorized
 	}
 
-	doc, err := c.get(ctx, txn, primaryKey, nil, showDeleted)
+	doc, err := c.get(ctx, identity, txn, primaryKey, nil, showDeleted)
 	if err != nil {
 		return nil, err
 	}
+
+	if doc == nil {
+		return nil, client.ErrDocumentNotFoundOrNotAuthorized
+	}
+
 	return doc, c.commitImplicitTxn(ctx, txn)
 }
 
 func (c *collection) get(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
 	primaryKey core.PrimaryDataStoreKey,
 	fields []client.FieldDefinition,
@@ -54,7 +67,7 @@ func (c *collection) get(
 	// create a new document fetcher
 	df := c.newFetcher()
 	// initialize it with the primary index
-	err := df.Init(ctx, txn, c, fields, nil, nil, false, showDeleted)
+	err := df.Init(ctx, identity, txn, c.db.acp, c, fields, nil, nil, false, showDeleted)
 	if err != nil {
 		_ = df.Close()
 		return nil, err
diff --git a/db/collection_index.go b/db/collection_index.go
index 7fb036498a..1a7af8cc25 100644
--- a/db/collection_index.go
+++ b/db/collection_index.go
@@ -20,6 +20,7 @@ import (
 
 	"github.com/sourcenetwork/immutable"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/core"
 	"github.com/sourcenetwork/defradb/datastore"
@@ -99,8 +100,11 @@ func (db *db) fetchCollectionIndexDescriptions(
 	colID uint32,
 ) ([]client.IndexDescription, error) {
 	prefix := core.NewCollectionIndexKey(immutable.Some(colID), "")
-	_, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx,
-		prefix.ToString(), txn.Systemstore())
+	_, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](
+		ctx,
+		prefix.ToString(),
+		txn.Systemstore(),
+	)
 	if err != nil {
 		return nil, err
 	}
@@ -179,8 +183,11 @@ func (c *collection) updateIndexedDoc(
 	if err != nil {
 		return err
 	}
+	// TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 - ACP <> Indexing, possibly also check
+	// and handle the case where oldDoc == nil (it will be nil if the document is inaccessible).
 	oldDoc, err := c.get(
 		ctx,
+		acpIdentity.NoIdentity,
 		txn,
 		c.getPrimaryKeyFromDocID(doc.ID()),
 		c.Definition().CollectIndexedFields(),
@@ -253,6 +260,12 @@ func (c *collection) createIndex(
 	txn datastore.Txn,
 	desc client.IndexDescription,
 ) (CollectionIndex, error) {
+	// Don't allow creating an index on a permissioned collection until the following is implemented.
+	// TODO-ACP: ACP <> INDEX https://github.com/sourcenetwork/defradb/issues/2365
+	if c.Description().Policy.HasValue() {
+		return nil, ErrCanNotCreateIndexOnCollectionWithPolicy
+	}
+
 	if desc.Name != "" && !schema.IsValidIndexName(desc.Name) {
 		return nil, schema.NewErrIndexWithInvalidName("!")
 	}
@@ -315,7 +328,18 @@ func (c *collection) iterateAllDocs(
 	exec func(doc *client.Document) error,
 ) error {
 	df := c.newFetcher()
-	err := df.Init(ctx, txn, c, fields, nil, nil, false, false)
+	err := df.Init(
+		ctx,
+		acpIdentity.NoIdentity, // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 - ACP <> Indexing
+		txn,
+		c.db.acp,
+		c,
+		fields,
+		nil,
+		nil,
+		false,
+		false,
+	)
 	if err != nil {
 		return errors.Join(err, df.Close())
 	}
diff --git a/db/collection_update.go b/db/collection_update.go
index a4a4dc3f4d..dcc3ba6cba 100644
--- a/db/collection_update.go
+++ b/db/collection_update.go
@@ -32,16 +32,17 @@ import (
 // Eg: UpdateWithFilter or UpdateWithDocID
 func (c *collection) UpdateWith(
 	ctx context.Context,
+	identity immutable.Option[string],
 	target any,
 	updater string,
 ) (*client.UpdateResult, error) {
 	switch t := target.(type) {
 	case string, map[string]any, *request.Filter:
-		return c.UpdateWithFilter(ctx, t, updater)
+		return c.UpdateWithFilter(ctx, identity, t, updater)
 	case client.DocID:
-		return c.UpdateWithDocID(ctx, t, updater)
+		return c.UpdateWithDocID(ctx, identity, t, updater)
 	case []client.DocID:
-		return c.UpdateWithDocIDs(ctx, t, updater)
+		return c.UpdateWithDocIDs(ctx, identity, t, updater)
 	default:
 		return nil, client.ErrInvalidUpdateTarget
 	}
@@ -52,6 +53,7 @@ func (c *collection) UpdateWith(
 // or a parsed Patch, or parsed Merge Patch.
 func (c *collection) UpdateWithFilter(
 	ctx context.Context,
+	identity immutable.Option[string],
 	filter any,
 	updater string,
 ) (*client.UpdateResult, error) {
@@ -60,7 +62,7 @@ func (c *collection) UpdateWithFilter(
 		return nil, err
 	}
 	defer c.discardImplicitTxn(ctx, txn)
-	res, err := c.updateWithFilter(ctx, txn, filter, updater)
+	res, err := c.updateWithFilter(ctx, identity, txn, filter, updater)
 	if err != nil {
 		return nil, err
 	}
@@ -72,6 +74,7 @@ func (c *collection) UpdateWithFilter(
 // or a parsed Patch, or parsed Merge Patch.
 func (c *collection) UpdateWithDocID(
 	ctx context.Context,
+	identity immutable.Option[string],
 	docID client.DocID,
 	updater string,
 ) (*client.UpdateResult, error) {
@@ -80,7 +83,7 @@ func (c *collection) UpdateWithDocID(
 		return nil, err
 	}
 	defer c.discardImplicitTxn(ctx, txn)
-	res, err := c.updateWithDocID(ctx, txn, docID, updater)
+	res, err := c.updateWithDocID(ctx, identity, txn, docID, updater)
 	if err != nil {
 		return nil, err
 	}
@@ -93,6 +96,7 @@ func (c *collection) UpdateWithDocID(
 // or a parsed Patch, or parsed Merge Patch.
func (c *collection) UpdateWithDocIDs( ctx context.Context, + identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -101,7 +105,7 @@ func (c *collection) UpdateWithDocIDs( return nil, err } defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithIDs(ctx, txn, docIDs, updater) + res, err := c.updateWithIDs(ctx, identity, txn, docIDs, updater) if err != nil { return nil, err } @@ -111,6 +115,7 @@ func (c *collection) UpdateWithDocIDs( func (c *collection) updateWithDocID( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, docID client.DocID, updater string, @@ -127,7 +132,7 @@ func (c *collection) updateWithDocID( return nil, client.ErrInvalidUpdater } - doc, err := c.Get(ctx, docID, false) + doc, err := c.Get(ctx, identity, docID, false) if err != nil { return nil, err } @@ -141,7 +146,7 @@ func (c *collection) updateWithDocID( return nil, err } - _, err = c.save(ctx, txn, doc, false) + err = c.update(ctx, identity, txn, doc) if err != nil { return nil, err } @@ -155,6 +160,7 @@ func (c *collection) updateWithDocID( func (c *collection) updateWithIDs( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, docIDs []client.DocID, updater string, @@ -175,7 +181,7 @@ func (c *collection) updateWithIDs( DocIDs: make([]string, len(docIDs)), } for i, docIDs := range docIDs { - doc, err := c.Get(ctx, docIDs, false) + doc, err := c.Get(ctx, identity, docIDs, false) if err != nil { return nil, err } @@ -189,7 +195,7 @@ func (c *collection) updateWithIDs( return nil, err } - _, err = c.save(ctx, txn, doc, false) + err = c.update(ctx, identity, txn, doc) if err != nil { return nil, err } @@ -202,6 +208,7 @@ func (c *collection) updateWithIDs( func (c *collection) updateWithFilter( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, filter any, updater string, @@ -223,7 +230,7 @@ func (c *collection) updateWithFilter( } // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter) if err != nil { return nil, err } @@ -277,7 +284,7 @@ func (c *collection) updateWithFilter( } } - _, err = c.save(ctx, txn, doc, false) + err = c.update(ctx, identity, txn, doc) if err != nil { return nil, err } @@ -310,6 +317,7 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDefinition) (clien // patched. 
func (c *collection) patchPrimaryDoc( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, secondaryCollectionName string, relationFieldDescription client.FieldDefinition, @@ -345,9 +353,11 @@ func (c *collection) patchPrimaryDoc( doc, err := primaryCol.Get( ctx, + identity, primaryDocID, false, ) + if err != nil && !errors.Is(err, ds.ErrNotFound) { return err } @@ -358,7 +368,14 @@ func (c *collection) patchPrimaryDoc( } pc := c.db.newCollection(primaryCol.Description(), primarySchema) - err = pc.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, primaryDocID.String(), primaryIDField, docID) + err = pc.validateOneToOneLinkDoesntAlreadyExist( + ctx, + identity, + txn, + primaryDocID.String(), + primaryIDField, + docID, + ) if err != nil { return err } @@ -377,7 +394,7 @@ func (c *collection) patchPrimaryDoc( return err } - err = primaryCol.Update(ctx, doc) + err = primaryCol.Update(ctx, identity, doc) if err != nil { return err } @@ -391,6 +408,7 @@ func (c *collection) patchPrimaryDoc( // Additionally it only requests for the root scalar fields of the object func (c *collection) makeSelectionPlan( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, filter any, ) (planner.RequestPlan, error) { @@ -417,7 +435,14 @@ func (c *collection) makeSelectionPlan( return nil, err } - planner := planner.New(ctx, c.db.WithTxn(txn), txn) + planner := planner.New( + ctx, + identity, + c.db.acp, + c.db.WithTxn(txn), + txn, + ) + return planner.MakePlan(&request.Request{ Queries: []*request.OperationDefinition{ { diff --git a/db/db.go b/db/db.go index 30036fc55d..1b02e1d0c1 100644 --- a/db/db.go +++ b/db/db.go @@ -25,6 +25,7 @@ import ( "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -44,7 +45,8 @@ var ( ) const ( - defaultMaxTxnRetries = 5 + defaultMaxTxnRetries = 5 + updateEventBufferSize = 100 ) // DB is the main interface for interacting with the @@ -71,12 +73,25 @@ type db struct { // The ID of the last transaction created. previousTxnID atomic.Uint64 + + // Contains ACP if it exists + acp immutable.Option[acp.ACP] } // Functional option type. type Option func(*db) -const updateEventBufferSize = 100 +// WithACP enables access control. If path is empty then acp runs in-memory. +func WithACP(path string) Option { + return func(db *db) { + var acpLocal acp.ACPLocal + acpLocal.Init(context.Background(), path) + db.acp = immutable.Some[acp.ACP](&acpLocal) + } +} + +// WithACPInMemory enables access control in-memory. +func WithACPInMemory() Option { return WithACP("") } // WithUpdateEvents enables the update events channel. func WithUpdateEvents() Option { @@ -104,11 +119,19 @@ func WithLensPoolSize(num int) Option { } // NewDB creates a new instance of the DB using the given options. -func NewDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (client.DB, error) { +func NewDB( + ctx context.Context, + rootstore datastore.RootStore, + options ...Option, +) (client.DB, error) { return newDB(ctx, rootstore, options...) 
 }
 
-func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (*implicitTxnDB, error) {
+func newDB(
+	ctx context.Context,
+	rootstore datastore.RootStore,
+	options ...Option,
+) (*implicitTxnDB, error) {
 	multistore := datastore.MultiStoreFrom(rootstore)
 
 	parser, err := graphql.NewParser()
@@ -119,9 +142,9 @@ func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option
 	db := &db{
 		rootstore:  rootstore,
 		multistore: multistore,
-
-		parser:  parser,
-		options: options,
+		acp:        acp.NoACP,
+		parser:     parser,
+		options:    options,
 	}
 
 	// apply options
@@ -184,6 +207,28 @@ func (db *db) LensRegistry() client.LensRegistry {
 	return db.lensRegistry
 }
 
+func (db *db) AddPolicy(
+	ctx context.Context,
+	creator string,
+	policy string,
+) (client.AddPolicyResult, error) {
+	if !db.acp.HasValue() {
+		return client.AddPolicyResult{}, client.ErrPolicyAddFailureNoACP
+	}
+
+	policyID, err := db.acp.Value().AddPolicy(
+		ctx,
+		creator,
+		policy,
+	)
+
+	if err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	return client.AddPolicyResult{PolicyID: policyID}, nil
+}
+
 // Initialize is called when a database is first run and creates all the db global meta data
 // like Collection ID counters.
 func (db *db) initialize(ctx context.Context) error {
@@ -196,6 +241,14 @@ func (db *db) initialize(ctx context.Context) error {
 	}
 	defer txn.Discard(ctx)
 
+	// Start acp if enabled; this will recover the previous state if there is any.
+	if db.acp.HasValue() {
+		// db is responsible for calling db.acp.Close() to free acp resources while closing.
+		if err = db.acp.Value().Start(ctx); err != nil {
+			return err
+		}
+	}
+
 	exists, err := txn.Systemstore().Has(ctx, ds.NewKey("init"))
 	if err != nil && !errors.Is(err, ds.ErrNotFound) {
 		return err
@@ -265,6 +318,13 @@ func (db *db) Close() {
 	if err != nil {
 		log.ErrorE("Failure closing running process", err)
 	}
+
+	if db.acp.HasValue() {
+		if err := db.acp.Value().Close(); err != nil {
+			log.ErrorE("Failure closing acp", err)
+		}
+	}
+
 	log.Info("Successfully closed running process")
 }
 
diff --git a/db/description/collection.go b/db/description/collection.go
index a6e9cd8b57..3658d3d318 100644
--- a/db/description/collection.go
+++ b/db/description/collection.go
@@ -237,7 +237,7 @@ func GetCollections(
 	txn datastore.Txn,
 ) ([]client.CollectionDescription, error) {
 	q, err := txn.Systemstore().Query(ctx, query.Query{
-		Prefix: core.COLLECTION,
+		Prefix: core.COLLECTION_ID,
 	})
 	if err != nil {
 		return nil, NewErrFailedToCreateCollectionQuery(err)
diff --git a/db/errors.go b/db/errors.go
index b854ad2d3d..da82fcb941 100644
--- a/db/errors.go
+++ b/db/errors.go
@@ -88,14 +88,17 @@ const (
 	errCollectionSourceIDMutated                string = "collection source ID cannot be mutated"
 	errCollectionIndexesCannotBeMutated         string = "collection indexes cannot be mutated"
 	errCollectionFieldsCannotBeMutated          string = "collection fields cannot be mutated"
+	errCollectionPolicyCannotBeMutated          string = "collection policy cannot be mutated"
 	errCollectionRootIDCannotBeMutated          string = "collection root ID cannot be mutated"
 	errCollectionSchemaVersionIDCannotBeMutated string = "collection schema version ID cannot be mutated"
 	errCollectionIDCannotBeZero                 string = "collection ID cannot be zero"
 	errCollectionsCannotBeDeleted               string = "collections cannot be deleted"
+	errCanNotHavePolicyWithoutACP               string = "can not specify policy on collection, without acp"
 )
 
 var (
 	ErrFailedToGetCollection                   = errors.New(errFailedToGetCollection)
+	ErrCanNotCreateIndexOnCollectionWithPolicy = errors.New("can not
create index on a collection with a policy") ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") ErrInvalidFilter = errors.New("invalid filter") ErrCollectionAlreadyExists = errors.New(errCollectionAlreadyExists) @@ -121,6 +124,7 @@ var ( ErrCollectionSchemaVersionIDCannotBeMutated = errors.New(errCollectionSchemaVersionIDCannotBeMutated) ErrCollectionIDCannotBeZero = errors.New(errCollectionIDCannotBeZero) ErrCollectionsCannotBeDeleted = errors.New(errCollectionsCannotBeDeleted) + ErrCanNotHavePolicyWithoutACP = errors.New(errCanNotHavePolicyWithoutACP) ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -597,6 +601,13 @@ func NewErrCollectionFieldsCannotBeMutated(colID uint32) error { ) } +func NewErrCollectionPolicyCannotBeMutated(colID uint32) error { + return errors.New( + errCollectionPolicyCannotBeMutated, + errors.NewKV("CollectionID", colID), + ) +} + func NewErrCollectionRootIDCannotBeMutated(colID uint32) error { return errors.New( errCollectionRootIDCannotBeMutated, diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index e4bb08cee4..db20310768 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -18,11 +18,15 @@ import ( "github.com/bits-and-blooms/bitset" dsq "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/datastore/iterable" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/db/permission" "github.com/sourcenetwork/defradb/planner/mapper" "github.com/sourcenetwork/defradb/request/graphql/parser" ) @@ -56,7 +60,9 @@ func (s *ExecInfo) Reset() { type Fetcher interface { Init( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -81,6 +87,10 @@ var ( // DocumentFetcher is a utility to incrementally fetch all the documents. type DocumentFetcher struct { + identity immutable.Option[string] + acp immutable.Option[acp.ACP] + passedPermissionCheck bool // have valid permission to access + col client.Collection reverse bool deletedDocs bool @@ -136,7 +146,9 @@ type DocumentFetcher struct { // Init implements DocumentFetcher. 
 func (df *DocumentFetcher) Init(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
+	acp immutable.Option[acp.ACP],
 	col client.Collection,
 	fields []client.FieldDefinition,
 	filter *mapper.Filter,
@@ -146,7 +158,7 @@ func (df *DocumentFetcher) Init(
 ) error {
 	df.txn = txn
 
-	err := df.init(col, fields, filter, docmapper, reverse)
+	err := df.init(identity, acp, col, fields, filter, docmapper, reverse)
 	if err != nil {
 		return err
 	}
@@ -156,19 +168,23 @@ func (df *DocumentFetcher) Init(
 			df.deletedDocFetcher = new(DocumentFetcher)
 			df.deletedDocFetcher.txn = txn
 		}
-		return df.deletedDocFetcher.init(col, fields, filter, docmapper, reverse)
+		return df.deletedDocFetcher.init(identity, acp, col, fields, filter, docmapper, reverse)
 	}
 
 	return nil
 }
 
 func (df *DocumentFetcher) init(
+	identity immutable.Option[string],
+	acp immutable.Option[acp.ACP],
 	col client.Collection,
 	fields []client.FieldDefinition,
 	filter *mapper.Filter,
 	docMapper *core.DocumentMapping,
 	reverse bool,
 ) error {
+	df.identity = identity
+	df.acp = acp
 	df.col = col
 	df.reverse = reverse
 	df.initialized = true
@@ -476,6 +492,7 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error {
 		}
 	}
 	df.doc.id = []byte(kv.Key.DocID)
+	df.passedPermissionCheck = false
 	df.passedFilter = false
 	df.ranFilter = false
 
@@ -544,24 +561,26 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec
 			(df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) ||
 				(!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) {
 				encdoc, execInfo, err := ddf.FetchNext(ctx)
+
 				if err != nil {
 					return nil, ExecInfo{}, err
 				}
-				if encdoc != nil {
-					return encdoc, execInfo, err
-				}
 
 				resultExecInfo.Add(execInfo)
+				if encdoc != nil {
+					return encdoc, resultExecInfo, nil
+				}
 			}
 		}
 	}
 
 	encdoc, execInfo, err := df.fetchNext(ctx)
+
 	if err != nil {
 		return nil, ExecInfo{}, err
 	}
-	resultExecInfo.Add(execInfo)
 
+	resultExecInfo.Add(execInfo)
 	return encdoc, resultExecInfo, err
 }
 
@@ -573,9 +592,6 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 	if df.kv == nil {
 		return nil, ExecInfo{}, client.NewErrUninitializeProperty("DocumentFetcher", "kv")
 	}
-	// save the DocID of the current kv pair so we can track when we cross the doc pair boundries
-	// keyparts := df.kv.Key.List()
-	// key := keyparts[len(keyparts)-2]
 
 	prevExecInfo := df.execInfo
 	defer func() { df.execInfo.Add(prevExecInfo) }()
@@ -584,8 +600,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 	// we'll know when were done when either
 	// A) Reach the end of the iterator
 	for {
-		err := df.processKV(df.kv)
-		if err != nil {
+		if err := df.processKV(df.kv); err != nil {
 			return nil, ExecInfo{}, err
 		}
 
@@ -606,16 +621,45 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 			}
 		}
 
-		// if we don't pass the filter (ran and pass)
-		// theres no point in collecting other select fields
-		// so we seek to the next doc
-		spansDone, docDone, err := df.nextKey(ctx, !df.passedFilter && df.ranFilter)
+		// Check if we have read access to this document on this collection, with the given identity.
+		if !df.passedPermissionCheck {
+			if !df.acp.HasValue() {
+				// If no acp is available, then we have unrestricted access.
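+				// (This matches collection.checkAccessOfDocWithACP, which also grants
+				// unrestricted access when no acp is configured.)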
+				df.passedPermissionCheck = true
+			} else {
+				hasPermission, err := permission.CheckAccessOfDocOnCollectionWithACP(
+					ctx,
+					df.identity,
+					df.acp.Value(),
+					df.col,
+					acp.ReadPermission,
+					df.kv.Key.DocID,
+				)
+
+				if err != nil {
+					df.passedPermissionCheck = false
+					return nil, ExecInfo{}, err
+				}
+
+				df.passedPermissionCheck = hasPermission
+			}
+		}
+
+		// if we don't pass the filter (ran and pass) or if we don't have access to the document,
+		// then there is no point in collecting other select fields, so we seek to the next doc.
+		spansDone, docDone, err := df.nextKey(ctx, !df.passedPermissionCheck || !df.passedFilter && df.ranFilter)
+
 		if err != nil {
 			return nil, ExecInfo{}, err
 		}
 
-		if docDone {
-			df.execInfo.DocsFetched++
+		if !docDone {
+			continue
+		}
+
+		df.execInfo.DocsFetched++
+
+		if df.passedPermissionCheck {
 			if df.filter != nil {
 				// if we passed, return
 				if df.passedFilter {
@@ -636,21 +680,11 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec
 			} else {
 				return df.doc, df.execInfo, nil
 			}
+		}
 
-			if !spansDone {
-				continue
-			}
-
+		if spansDone {
 			return nil, df.execInfo, nil
 		}
-
-		// // crossed document kv boundary?
-		// // if so, return document
-		// newkeyparts := df.kv.Key.List()
-		// newKey := newkeyparts[len(newkeyparts)-2]
-		// if newKey != key {
-		// 	return df.doc, nil
-		// }
 	}
 }
diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go
index 0d09a2fc9a..84b1cba103 100644
--- a/db/fetcher/indexer.go
+++ b/db/fetcher/indexer.go
@@ -13,6 +13,9 @@ package fetcher
 import (
 	"context"
 
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/core"
 	"github.com/sourcenetwork/defradb/datastore"
@@ -54,7 +57,9 @@ func NewIndexFetcher(
 
 func (f *IndexFetcher) Init(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
+	acp immutable.Option[acp.ACP],
 	col client.Collection,
 	fields []client.FieldDefinition,
 	filter *mapper.Filter,
@@ -93,7 +98,18 @@ outer:
 	f.indexIter = iter
 
 	if f.docFetcher != nil && len(f.docFields) > 0 {
-		err = f.docFetcher.Init(ctx, f.txn, f.col, f.docFields, f.docFilter, f.mapping, false, false)
+		err = f.docFetcher.Init(
+			ctx,
+			identity,
+			f.txn,
+			acp,
+			f.col,
+			f.docFields,
+			f.docFilter,
+			f.mapping,
+			false,
+			false,
+		)
 	}
 
 	return err
diff --git a/db/fetcher/mocks/fetcher.go b/db/fetcher/mocks/fetcher.go
index 044425c70b..e789032e47 100644
--- a/db/fetcher/mocks/fetcher.go
+++ b/db/fetcher/mocks/fetcher.go
@@ -3,16 +3,19 @@
 package mocks
 
 import (
-	context "context"
-
+	acp "github.com/sourcenetwork/defradb/acp"
 	client "github.com/sourcenetwork/defradb/client"
 
+	context "context"
+
 	core "github.com/sourcenetwork/defradb/core"
 
 	datastore "github.com/sourcenetwork/defradb/datastore"
 
 	fetcher "github.com/sourcenetwork/defradb/db/fetcher"
 
+	immutable "github.com/sourcenetwork/immutable"
+
 	mapper "github.com/sourcenetwork/defradb/planner/mapper"
 
 	mock "github.com/stretchr/testify/mock"
@@ -133,13 +136,13 @@ func (_c *Fetcher_FetchNext_Call) RunAndReturn(run func(context.Context) (fetche
 	return _c
 }
 
-// Init provides a mock function with given fields: ctx, txn, col, fields, filter, docmapper, reverse, showDeleted
-func (_m *Fetcher) Init(ctx context.Context, txn datastore.Txn, col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error {
-	ret := _m.Called(ctx, txn, col, fields, filter, docmapper, reverse, showDeleted)
+// Init
provides a mock function with given fields: ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted +func (_m *Fetcher) Init(ctx context.Context, identity immutable.Option[string], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { + ret := _m.Called(ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, datastore.Txn, client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { - r0 = rf(ctx, txn, col, fields, filter, docmapper, reverse, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { + r0 = rf(ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) } else { r0 = ret.Error(0) } @@ -154,20 +157,22 @@ type Fetcher_Init_Call struct { // Init is a helper method to define mock.On call // - ctx context.Context +// - identity immutable.Option[string] // - txn datastore.Txn +// - _a3 immutable.Option[acp.ACP] // - col client.Collection // - fields []client.FieldDefinition // - filter *mapper.Filter // - docmapper *core.DocumentMapping // - reverse bool // - showDeleted bool -func (_e *Fetcher_Expecter) Init(ctx interface{}, txn interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { - return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, txn, col, fields, filter, docmapper, reverse, showDeleted)} +func (_e *Fetcher_Expecter) Init(ctx interface{}, identity interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { + return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} } -func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, txn datastore.Txn, col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, identity immutable.Option[string], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(datastore.Txn), args[2].(client.Collection), args[3].([]client.FieldDefinition), args[4].(*mapper.Filter), args[5].(*core.DocumentMapping), args[6].(bool), args[7].(bool)) + run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(datastore.Txn), args[3].(immutable.Option[acp.ACP]), args[4].(client.Collection), args[5].([]client.FieldDefinition), args[6].(*mapper.Filter), args[7].(*core.DocumentMapping), args[8].(bool), args[9].(bool)) }) return _c } @@ -177,7 +182,7 @@ func (_c *Fetcher_Init_Call) Return(_a0 error) *Fetcher_Init_Call { return _c } -func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, datastore.Txn, client.Collection, 
[]client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Option[string], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { _c.Call.Return(run) return _c } diff --git a/db/fetcher/mocks/utils.go b/db/fetcher/mocks/utils.go index 298d5b2ad6..524c46fc9e 100644 --- a/db/fetcher/mocks/utils.go +++ b/db/fetcher/mocks/utils.go @@ -27,6 +27,8 @@ func NewStubbedFetcher(t *testing.T) *Fetcher { mock.Anything, mock.Anything, mock.Anything, + mock.Anything, + mock.Anything, ).Maybe().Return(nil) f.EXPECT().Start(mock.Anything, mock.Anything).Maybe().Return(nil) f.EXPECT().FetchNext(mock.Anything).Maybe().Return(nil, nil) diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index 3f05f2c29a..16a4515939 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -19,6 +19,9 @@ import ( ds "github.com/ipfs/go-datastore" format "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -91,6 +94,8 @@ type VersionedFetcher struct { queuedCids *list.List + acp immutable.Option[acp.ACP] + col client.Collection // @todo index *client.IndexDescription mCRDTs map[uint32]merklecrdt.MerkleCRDT @@ -99,7 +104,9 @@ type VersionedFetcher struct { // Init initializes the VersionedFetcher. func (vf *VersionedFetcher) Init( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -107,6 +114,7 @@ func (vf *VersionedFetcher) Init( reverse bool, showDeleted bool, ) error { + vf.acp = acp vf.col = col vf.queuedCids = list.New() vf.mCRDTs = make(map[uint32]merklecrdt.MerkleCRDT) @@ -130,7 +138,18 @@ func (vf *VersionedFetcher) Init( // run the DF init, VersionedFetchers only supports the Primary (0) index vf.DocumentFetcher = new(DocumentFetcher) - return vf.DocumentFetcher.Init(ctx, vf.store, col, fields, filter, docmapper, reverse, showDeleted) + return vf.DocumentFetcher.Init( + ctx, + identity, + vf.store, + acp, + col, + fields, + filter, + docmapper, + reverse, + showDeleted, + ) } // Start serializes the correct state according to the Key and CID. 
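The three `Init` implementations above (DocumentFetcher, IndexFetcher, VersionedFetcher) now share the same ten-argument shape, threading the request identity and the node's optional ACP handle ahead of the collection arguments. A minimal sketch of a call, modeled on `iterateAllDocs` from this diff; `ctx`, `c` (a *collection), `txn`, and `fields` are assumed to be set up as elsewhere in the change:

```go
// A sketch only: `c`, `txn`, and `fields` are assumed from the surrounding code.
df := c.newFetcher()
err := df.Init(
	ctx,
	acpIdentity.NoIdentity, // immutable.Option[string]: None means an unauthenticated request
	txn,
	c.db.acp, // immutable.Option[acp.ACP]: None means access control is disabled
	c,        // the client.Collection being fetched from
	fields,   // []client.FieldDefinition to decode
	nil,      // *mapper.Filter: no filter
	nil,      // *core.DocumentMapping
	false,    // reverse
	false,    // showDeleted
)
if err != nil {
	return errors.Join(err, df.Close())
}
```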
diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index aeca3341b6..c11eb2617f 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -23,6 +23,8 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -46,7 +48,7 @@ type productDoc struct { } func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client.Collection) { - err := col.Create(f.ctx, doc) + err := col.Create(f.ctx, acpIdentity.NoIdentity, doc) require.NoError(f.t, err) f.commitTxn() f.txn, err = f.db.NewTxn(f.ctx, false) @@ -320,7 +322,7 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { dataStoreOn.Put(mock.Anything, key.ToDS(), mock.Anything).Return(errors.New("error")) dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) } @@ -338,7 +340,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { doc, err := client.NewDocFromJSON(data, f.users.Schema()) require.NoError(f.t, err) - err = f.users.Create(f.ctx, doc) + err = f.users.Create(f.ctx, acpIdentity.NoIdentity, doc) require.NoError(f.t, err) key := newIndexKeyBuilder(f).Col(usersColName).Build() @@ -358,7 +360,7 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) systemStoreOn.Query(mock.Anything, mock.Anything). Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) } @@ -376,7 +378,7 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { systemStoreOn.Query(mock.Anything, mock.Anything). 
Return(nil, testErr) - err := f.users.WithTxn(mockTxn).Create(f.ctx, doc) + err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } @@ -409,9 +411,9 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t userDoc := f.newUserDoc("John", 21, users) prodDoc := f.newProdDoc(1, 3, "games", products) - err = users.Create(f.ctx, userDoc) + err = users.Create(f.ctx, acpIdentity.NoIdentity, userDoc) require.NoError(f.t, err) - err = products.Create(f.ctx, prodDoc) + err = products.Create(f.ctx, acpIdentity.NoIdentity, prodDoc) require.NoError(f.t, err) f.commitTxn() @@ -588,8 +590,30 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t Name: "Fails to init", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testError) + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -721,14 +745,14 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { Name: "update", NewValue: "Islam", Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, doc) + return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) }, }, { Name: "save", NewValue: "Andy", Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, doc) + return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) }, }, } @@ -782,7 +806,7 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) usersCol.(*collection).fetcherFactory = func() fetcher.Fetcher { return fetcherMocks.NewStubbedFetcher(t) } - err = usersCol.WithTxn(mockedTxn).Update(f.ctx, doc) + err = usersCol.WithTxn(mockedTxn).Update(f.ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } @@ -797,8 +821,30 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { Name: "Fails to init", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(testError) + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -856,7 +902,7 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { err := 
doc.Set(usersNameFieldName, "Islam") require.NoError(t, err, tc.Name) - err = f.users.Update(f.ctx, doc) + err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) require.Error(t, err, tc.Name) newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build() @@ -884,7 +930,7 @@ func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) { err = doc.Set(usersAgeFieldName, 23) require.NoError(t, err) - err = f.users.Update(f.ctx, doc) + err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, ErrCorruptedIndex) } @@ -896,11 +942,35 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { f.users.(*collection).fetcherFactory = func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Unset() - f.EXPECT().Init(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything). + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ).Unset() + f.EXPECT().Init( + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + mock.Anything, + ). RunAndReturn(func( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, + acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, @@ -920,7 +990,7 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { err := doc.Set(usersNameFieldName, "Islam") require.NoError(t, err) - _ = f.users.Update(f.ctx, doc) + _ = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) } func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { @@ -978,7 +1048,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Datastore().Unset() mockedTxn.EXPECT().Datastore().Return(mockedTxn.MockDatastore).Maybe() - err = f.users.WithTxn(mockedTxn).Update(f.ctx, doc) + err = f.users.WithTxn(mockedTxn).Update(f.ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } } @@ -1003,7 +1073,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { err = doc.Set(usersNameFieldName, "John") require.NoError(f.t, err) - err = f.users.Update(f.ctx, doc) + err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) require.NoError(f.t, err) f.commitTxn() @@ -1127,14 +1197,14 @@ func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { Name: "update", NewValue: "Islam", Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, doc) + return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) }, }, { Name: "save", NewValue: "Andy", Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, doc) + return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) }, }, } @@ -1270,7 +1340,7 @@ func TestUniqueComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). 
 		Doc(doc).Unique().Build()
 
-	require.NoError(t, f.users.Update(f.ctx, doc), tc.Name)
+	require.NoError(t, f.users.Update(f.ctx, acpIdentity.NoIdentity, doc), tc.Name)
 	f.commitTxn()
 
 	_, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
@@ -1319,7 +1389,7 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
 			NewValue: "Islam",
 			Field:    usersNameFieldName,
 			Exec: func(doc *client.Document) error {
-				return f.users.Update(f.ctx, doc)
+				return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc)
 			},
 		},
 		{
@@ -1327,7 +1397,7 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
 			NewValue: "Andy",
 			Field:    usersNameFieldName,
 			Exec: func(doc *client.Document) error {
-				return f.users.Save(f.ctx, doc)
+				return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc)
 			},
 		},
 		{
@@ -1335,7 +1405,7 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
 			NewValue: 33,
 			Field:    usersAgeFieldName,
 			Exec: func(doc *client.Document) error {
-				return f.users.Update(f.ctx, doc)
+				return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc)
 			},
 		},
 		{
@@ -1343,7 +1413,7 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
 			NewValue: 36,
 			Field:    usersAgeFieldName,
 			Exec: func(doc *client.Document) error {
-				return f.users.Save(f.ctx, doc)
+				return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc)
 			},
 		},
 	}
diff --git a/db/permission/check.go b/db/permission/check.go
new file mode 100644
index 0000000000..b62b9c384b
--- /dev/null
+++ b/db/permission/check.go
@@ -0,0 +1,91 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// CheckAccessOfDocOnCollectionWithACP handles the check that tells us if access to the target
+// document is valid, with respect to the permission type and the specified collection.
+//
+// This function should only be called if acp is available, as we have unrestricted
+// access when acp is not available (acp turned off).
+//
+// Since we know acp is enabled we have these components to check in this function:
+// (1) the request is permissioned (has an identity),
+// (2) the collection is permissioned (has a policy),
+//
+// Unrestricted access to the document if:
+// - (2) is false.
+// - Document is public (unregistered), whether the request is signed or not doesn't matter.
+func CheckAccessOfDocOnCollectionWithACP(
+	ctx context.Context,
+	identityOptional immutable.Option[string],
+	acpSystem acp.ACP,
+	collection client.Collection,
+	permission acp.DPIPermission,
+	docID string,
+) (bool, error) {
+	// Even if acp exists, if there is no policy on the collection (unpermissioned collection)
+	// then we still have unrestricted access.
+	policyID, resourceName, hasPolicy := isPermissioned(collection)
+	if !hasPolicy {
+		return true, nil
+	}
+
+	// Now that we know acp is available and the collection is permissioned, before checking access with
+	// acp directly we need to make sure that the document is not public, as public documents will not
+	// be registered with acp. We give unrestricted access to public documents, so it does not matter
+	// whether the request has a signature identity or not at this stage of the check.
+	isRegistered, err := acpSystem.IsDocRegistered(
+		ctx,
+		policyID,
+		resourceName,
+		docID,
+	)
+	if err != nil {
+		return false, err
+	}
+
+	if !isRegistered {
+		// Unrestricted access as it is a public document.
+		return true, nil
+	}
+
+	// At this point, if the request is not signed, then it has no access, because:
+	// the collection has a policy on it, and acp is enabled/available,
+	// and the document is not public (it is registered with acp).
+	if !identityOptional.HasValue() {
+		return false, nil
+	}
+
+	// Now actually check, using the signature, if this identity has access or not.
+	hasAccess, err := acpSystem.CheckDocAccess(
+		ctx,
+		permission,
+		identityOptional.Value(),
+		policyID,
+		resourceName,
+		docID,
+	)
+
+	if err != nil {
+		return false, err
+	}
+
+	return hasAccess, nil
+}
diff --git a/db/permission/permission.go b/db/permission/permission.go
new file mode 100644
index 0000000000..3b365cba75
--- /dev/null
+++ b/db/permission/permission.go
@@ -0,0 +1,32 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// isPermissioned returns true if the collection has a policy, otherwise returns false.
+//
+// This tells us if access control is enabled for this collection or not.
+//
+// When there is a policy, in addition to returning true in the last return value, the
+// first returned value is the policyID, and the second is the resource name.
+func isPermissioned(collection client.Collection) (string, string, bool) {
+	policy := collection.Definition().Description.Policy
+	if policy.HasValue() &&
+		policy.Value().ID != "" &&
+		policy.Value().ResourceName != "" {
+		return policy.Value().ID, policy.Value().ResourceName, true
+	}
+
+	return "", "", false
+}
diff --git a/db/permission/register.go b/db/permission/register.go
new file mode 100644
index 0000000000..b638a015db
--- /dev/null
+++ b/db/permission/register.go
@@ -0,0 +1,50 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package permission
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// RegisterDocOnCollectionWithACP handles the registration of the document with acp.
+//
+// Since acp will always exist when this is called we have these components to worry about:
+// (1) the request is permissioned (has an identity signature),
+// (2) the collection is permissioned (has a policy),
+//
+// The document is only registered if both (1) and (2) are true.
+//
+// Otherwise, nothing is registered with acp.
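+//
+// Note that a document left unregistered here (e.g. created without an identity) is
+// treated as public by CheckAccessOfDocOnCollectionWithACP, and so remains accessible
+// without restriction even on a permissioned collection.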
+func RegisterDocOnCollectionWithACP(
+	ctx context.Context,
+	identity immutable.Option[string],
+	acpSystem acp.ACP,
+	collection client.Collection,
+	docID string,
+) error {
+	// An identity exists and the collection has a policy.
+	if policyID, resourceName, hasPolicy := isPermissioned(collection); hasPolicy && identity.HasValue() {
+		return acpSystem.RegisterDocObject(
+			ctx,
+			identity.Value(),
+			policyID,
+			resourceName,
+			docID,
+		)
+	}
+
+	return nil
+}
diff --git a/db/request.go b/db/request.go
index 69eabebd34..2905ee4de2 100644
--- a/db/request.go
+++ b/db/request.go
@@ -13,13 +13,20 @@ package db
 import (
 	"context"
 
+	"github.com/sourcenetwork/immutable"
+
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/datastore"
 	"github.com/sourcenetwork/defradb/planner"
 )
 
 // execRequest executes a request against the database.
-func (db *db) execRequest(ctx context.Context, request string, txn datastore.Txn) *client.RequestResult {
+func (db *db) execRequest(
+	ctx context.Context,
+	identity immutable.Option[string],
+	request string,
+	txn datastore.Txn,
+) *client.RequestResult {
 	res := &client.RequestResult{}
 	ast, err := db.parser.BuildRequestAST(request)
 	if err != nil {
@@ -44,11 +51,17 @@ func (db *db) execRequest(ctx context.Context, request string, txn datastore.Txn
 
 	if pub != nil {
 		res.Pub = pub
-		go db.handleSubscription(ctx, pub, subRequest)
+		go db.handleSubscription(ctx, identity, pub, subRequest)
 		return res
 	}
 
-	planner := planner.New(ctx, db.WithTxn(txn), txn)
+	planner := planner.New(
+		ctx,
+		identity,
+		db.acp,
+		db.WithTxn(txn),
+		txn,
+	)
 
 	results, err := planner.RunRequest(ctx, parsedRequest)
 	if err != nil {
diff --git a/db/schema.go b/db/schema.go
index 676d983983..5b10df9906 100644
--- a/db/schema.go
+++ b/db/schema.go
@@ -47,6 +47,12 @@ func (db *db) addSchema(
 	returnDescriptions := make([]client.CollectionDescription, len(newDefinitions))
 
 	for i, definition := range newDefinitions {
+		// Only accept the schema if the policy description is valid; otherwise reject the schema.
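+		// (A policy may only be specified on a collection when acp is enabled;
+		// see ErrCanNotHavePolicyWithoutACP in db/errors.go.)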
+ err := db.validateCollectionDefinitionPolicyDesc(ctx, definition.Description.Policy) + if err != nil { + return nil, err + } + col, err := db.createCollection(ctx, txn, definition) if err != nil { return nil, err diff --git a/db/subscriptions.go b/db/subscriptions.go index bc013ae587..f6f187c54f 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -13,6 +13,8 @@ package db import ( "context" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/datastore" @@ -49,6 +51,7 @@ func (db *db) checkForClientSubscriptions(r *request.Request) ( func (db *db) handleSubscription( ctx context.Context, + identity immutable.Option[string], pub *events.Publisher[events.Update], r *request.ObjectSubscription, ) { @@ -59,7 +62,7 @@ func (db *db) handleSubscription( continue } - db.handleEvent(ctx, txn, pub, evt, r) + db.handleEvent(ctx, identity, txn, pub, evt, r) txn.Discard(ctx) } @@ -67,12 +70,19 @@ func (db *db) handleSubscription( func (db *db) handleEvent( ctx context.Context, + identity immutable.Option[string], txn datastore.Txn, pub *events.Publisher[events.Update], evt events.Update, r *request.ObjectSubscription, ) { - p := planner.New(ctx, db.WithTxn(txn), txn) + p := planner.New( + ctx, + identity, + db.acp, + db.WithTxn(txn), + txn, + ) s := r.ToSelect(evt.DocID, evt.Cid.String()) diff --git a/db/txn_db.go b/db/txn_db.go index 09a7002033..e77176b433 100644 --- a/db/txn_db.go +++ b/db/txn_db.go @@ -14,6 +14,7 @@ import ( "context" "github.com/lens-vm/lens/host-go/config/model" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" @@ -36,7 +37,11 @@ type explicitTxnDB struct { } // ExecRequest executes a request against the database. -func (db *implicitTxnDB) ExecRequest(ctx context.Context, request string) *client.RequestResult { +func (db *implicitTxnDB) ExecRequest( + ctx context.Context, + identity immutable.Option[string], + request string, +) *client.RequestResult { txn, err := db.NewTxn(ctx, false) if err != nil { res := &client.RequestResult{} @@ -45,7 +50,7 @@ func (db *implicitTxnDB) ExecRequest(ctx context.Context, request string) *clien } defer txn.Discard(ctx) - res := db.execRequest(ctx, request, txn) + res := db.execRequest(ctx, identity, request, txn) if len(res.GQL.Errors) > 0 { return res } @@ -61,9 +66,10 @@ func (db *implicitTxnDB) ExecRequest(ctx context.Context, request string) *clien // ExecRequest executes a transaction request against the database. func (db *explicitTxnDB) ExecRequest( ctx context.Context, + identity immutable.Option[string], request string, ) *client.RequestResult { - return db.execRequest(ctx, request, db.txn) + return db.execRequest(ctx, identity, request, db.txn) } // GetCollectionByName returns an existing collection within the database. diff --git a/docs/cli/defradb.md b/docs/cli/defradb.md index c89ce0f1aa..00b29d7392 100644 --- a/docs/cli/defradb.md +++ b/docs/cli/defradb.md @@ -14,11 +14,12 @@ Start a DefraDB node, interact with a local or remote node, and much more. ``` --allowed-origins stringArray List of origins to allow for CORS requests -h, --help help for defradb - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info")
-      --lognocolor                Disable colored log output
-      --logoutput string          Log output path (default "stderr")
-      --logtrace                  Include stacktrace in error and fatal logs
+      --log-format string         Log format to use. Options are text or json (default "text")
+      --log-level string          Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string         Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string      Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                Include source location in logs
+      --log-stacktrace            Include stacktrace in error and fatal logs
       --max-txn-retries int       Specify the maximum number of retries per transaction (default 5)
       --no-p2p                    Disable the peer-to-peer network synchronization system
       --p2paddr strings           Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171])
diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md
index 30e8c804ee..1b5532ac9f 100644
--- a/docs/cli/defradb_client.md
+++ b/docs/cli/defradb_client.md
@@ -18,11 +18,12 @@ Execute queries, add schema types, obtain node info, etc.
 
 ```
       --allowed-origins stringArray   List of origins to allow for CORS requests
-      --logformat string              Log format to use. Options are csv, json (default "csv")
-      --loglevel string               Log level to use. Options are debug, info, error, fatal (default "info")
-      --lognocolor                    Disable colored log output
-      --logoutput string              Log output path (default "stderr")
-      --logtrace                      Include stacktrace in error and fatal logs
+      --log-format string             Log format to use. Options are text or json (default "text")
+      --log-level string              Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string             Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string          Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                    Include source location in logs
+      --log-stacktrace                Include stacktrace in error and fatal logs
       --max-txn-retries int           Specify the maximum number of retries per transaction (default 5)
       --no-p2p                        Disable the peer-to-peer network synchronization system
       --p2paddr strings               Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171])
@@ -38,6 +39,7 @@ Execute queries, add schema types, obtain node info, etc.
 ### SEE ALSO
 
 * [defradb](defradb.md)	 - DefraDB Edge Database
+* [defradb client acp](defradb_client_acp.md)	 - Interact with the access control system of a DefraDB node
 * [defradb client backup](defradb_client_backup.md)	 - Interact with the backup utility
 * [defradb client collection](defradb_client_collection.md)	 - Interact with a collection.
 * [defradb client dump](defradb_client_dump.md)	 - Dump the contents of DefraDB node-side
diff --git a/docs/cli/defradb_client_acp.md b/docs/cli/defradb_client_acp.md
new file mode 100644
index 0000000000..ab4ac22d6d
--- /dev/null
+++ b/docs/cli/defradb_client_acp.md
@@ -0,0 +1,46 @@
+## defradb client acp
+
+Interact with the access control system of a DefraDB node
+
+### Synopsis
+
+Interact with the access control system of a DefraDB node
+
+Learn more about [ACP](/acp/README.md)
+
+
+
+### Options
+
+```
+  -h, --help   help for acp
+```
+
+### Options inherited from parent commands
+
+```
+      --allowed-origins stringArray   List of origins to allow for CORS requests
+      --log-format string             Log format to use. Options are text or json (default "text")
+      --log-level string              Log level to use.
Options are debug, info, error, fatal (default "info")
+      --log-output string             Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string          Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                    Include source location in logs
+      --log-stacktrace                Include stacktrace in error and fatal logs
+      --max-txn-retries int           Specify the maximum number of retries per transaction (default 5)
+      --no-p2p                        Disable the peer-to-peer network synchronization system
+      --p2paddr strings               Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171])
+      --peers stringArray             List of peers to connect to
+      --privkeypath string            Path to the private key for tls
+      --pubkeypath string             Path to the public key for tls
+      --rootdir string                Directory for persistent data (default: $HOME/.defradb)
+      --store string                  Specify the datastore to use (supported: badger, memory) (default "badger")
+      --tx uint                       Transaction ID
+      --url string                    URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+      --valuelogfilesize int          Specify the datastore value log file size (in bytes). In memory size will be 2*valuelogfilesize (default 1073741824)
+```
+
+### SEE ALSO
+
+* [defradb client](defradb_client.md)	 - Interact with a DefraDB node
+* [defradb client acp policy](defradb_client_acp_policy.md)	 - Interact with the acp policy features of a DefraDB instance
+
diff --git a/docs/cli/defradb_client_acp_policy.md b/docs/cli/defradb_client_acp_policy.md
new file mode 100644
index 0000000000..ef6d02e3dc
--- /dev/null
+++ b/docs/cli/defradb_client_acp_policy.md
@@ -0,0 +1,42 @@
+## defradb client acp policy
+
+Interact with the acp policy features of a DefraDB instance
+
+### Synopsis
+
+Interact with the acp policy features of a DefraDB instance
+
+### Options
+
+```
+  -h, --help   help for policy
+```
+
+### Options inherited from parent commands
+
+```
+      --allowed-origins stringArray   List of origins to allow for CORS requests
+      --log-format string             Log format to use. Options are text or json (default "text")
+      --log-level string              Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string             Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string          Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                    Include source location in logs
+      --log-stacktrace                Include stacktrace in error and fatal logs
+      --max-txn-retries int           Specify the maximum number of retries per transaction (default 5)
+      --no-p2p                        Disable the peer-to-peer network synchronization system
+      --p2paddr strings               Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171])
+      --peers stringArray             List of peers to connect to
+      --privkeypath string            Path to the private key for tls
+      --pubkeypath string             Path to the public key for tls
+      --rootdir string                Directory for persistent data (default: $HOME/.defradb)
+      --store string                  Specify the datastore to use (supported: badger, memory) (default "badger")
+      --tx uint                       Transaction ID
+      --url string                    URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+      --valuelogfilesize int          Specify the datastore value log file size (in bytes).
In memory size will be 2*valuelogfilesize (default 1073741824) +``` + +### SEE ALSO + +* [defradb client acp](defradb_client_acp.md) - Interact with the access control system of a DefraDB node +* [defradb client acp policy add](defradb_client_acp_policy_add.md) - Add new policy + diff --git a/docs/cli/defradb_client_acp_policy_add.md b/docs/cli/defradb_client_acp_policy_add.md new file mode 100644 index 0000000000..322842a962 --- /dev/null +++ b/docs/cli/defradb_client_acp_policy_add.md @@ -0,0 +1,90 @@ +## defradb client acp policy add + +Add new policy + +### Synopsis + +Add new policy + +Notes: + - Cannot add a policy without specifying an identity. + - ACP must be available (i.e. ACP cannot be disabled). + - A non-DPI policy will be accepted (it will be registered with the ACP system). + - But only a valid DPI policyID & resource can be specified on a schema. + - DPI validation happens when attempting to add a schema with '@policy'. + - Learn more about [ACP & DPI Rules](/acp/README.md) + +Example: add from an argument string: + defradb client acp policy add -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j ' +description: A Valid DefraDB Policy Interface + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +' + +Example: add from file: + defradb client acp policy add -i cosmos17r39df0hdcrgnmmw4mvu7qgk5nu888c7uvv37y -f policy.yml + +Example: add from file, verbose flags: + defradb client acp policy add --identity cosmos1kpw734v54g0t0d8tcye8ee5jc3gld0tcr2q473 --file policy.yml + +Example: add from stdin: + cat policy.yml | defradb client acp policy add - + + + +``` +defradb client acp policy add [-i --identity] [policy] [flags] +``` + +### Options + +``` + -f, --file string File to load a policy from + -h, --help help for add + -i, --identity string [Required] Identity of the creator +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). 
In memory size will be 2*valuelogfilesize (default 1073741824) +``` + +### SEE ALSO + +* [defradb client acp policy](defradb_client_acp_policy.md) - Interact with the acp policy features of DefraDB instance + diff --git a/docs/cli/defradb_client_backup.md b/docs/cli/defradb_client_backup.md index a7c7ae453b..c8b48818b9 100644 --- a/docs/cli/defradb_client_backup.md +++ b/docs/cli/defradb_client_backup.md @@ -17,11 +17,12 @@ Currently only supports JSON format. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_backup_export.md b/docs/cli/defradb_client_backup_export.md index 6992b120c6..c47e46de3f 100644 --- a/docs/cli/defradb_client_backup_export.md +++ b/docs/cli/defradb_client_backup_export.md @@ -31,11 +31,12 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_backup_import.md b/docs/cli/defradb_client_backup_import.md index ad2d3a1117..9e2c9c54e0 100644 --- a/docs/cli/defradb_client_backup_import.md +++ b/docs/cli/defradb_client_backup_import.md @@ -23,11 +23,12 @@ defradb client backup import [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index 593e2d01ee..c33b9970bf 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -21,11 +21,12 @@ Create, read, update, and delete documents within a collection. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) @@ -46,5 +47,6 @@ Create, read, update, and delete documents within a collection. * [defradb client collection describe](defradb_client_collection_describe.md) - View collection description. * [defradb client collection docIDs](defradb_client_collection_docIDs.md) - List all document IDs (docIDs). * [defradb client collection get](defradb_client_collection_get.md) - View document fields. +* [defradb client collection patch](defradb_client_collection_patch.md) - Patch existing collection descriptions * [defradb client collection update](defradb_client_collection_update.md) - Update documents by docID or filter. diff --git a/docs/cli/defradb_client_collection_create.md b/docs/cli/defradb_client_collection_create.md index 7c2cba7487..492bb6a060 100644 --- a/docs/cli/defradb_client_collection_create.md +++ b/docs/cli/defradb_client_collection_create.md @@ -6,28 +6,32 @@ Create a new document. Create a new document. 
-Example: create from string +Example: create from string: defradb client collection create --name User '{ "name": "Bob" }' -Example: create multiple from string +Example: create from string, with identity: + defradb client collection create -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User '{ "name": "Bob" }' + +Example: create multiple from string: defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]' -Example: create from file +Example: create from file: defradb client collection create --name User -f document.json -Example: create from stdin +Example: create from stdin: cat document.json | defradb client collection create --name User - ``` -defradb client collection create [flags] +defradb client collection create [-i --identity] [flags] ``` ### Options ``` - -f, --file string File containing document(s) - -h, --help help for create + -f, --file string File containing document(s) + -h, --help help for create + -i, --identity string Identity of the actor ``` ### Options inherited from parent commands @@ -35,11 +39,12 @@ defradb client collection create [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index 33a5af4809..ce2f6ff8ab 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -6,23 +6,27 @@ Delete documents by docID or filter. Delete documents by docID or filter and lists the number of documents deleted. 
-Example: delete by docID(s) - defradb client collection delete --name User --docID bae-123,bae-456 +Example: delete by docID(s): + defradb client collection delete --name User --docID bae-123,bae-456 -Example: delete by filter +Example: delete by docID(s) with identity: + defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123,bae-456 + +Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' ``` -defradb client collection delete [--filter --docID ] [flags] +defradb client collection delete [-i --identity] [--filter --docID ] [flags] ``` ### Options ``` - --docID strings Document ID - --filter string Document filter - -h, --help help for delete + --docID strings Document ID + --filter string Document filter + -h, --help help for delete + -i, --identity string Identity of the actor ``` ### Options inherited from parent commands @@ -30,11 +34,12 @@ defradb client collection delete [--filter --docID ] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_describe.md b/docs/cli/defradb_client_collection_describe.md index 46e8623d6a..54d4dd1b99 100644 --- a/docs/cli/defradb_client_collection_describe.md +++ b/docs/cli/defradb_client_collection_describe.md @@ -37,11 +37,12 @@ defradb client collection describe [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_docIDs.md b/docs/cli/defradb_client_collection_docIDs.md index c976d05417..1ee91d7462 100644 --- a/docs/cli/defradb_client_collection_docIDs.md +++ b/docs/cli/defradb_client_collection_docIDs.md @@ -6,18 +6,22 @@ List all document IDs (docIDs). List all document IDs (docIDs). -Example: +Example: list all docID(s): defradb client collection docIDs --name User + +Example: list all docID(s), with an identity: + defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User ``` -defradb client collection docIDs [flags] +defradb client collection docIDs [-i --identity] [flags] ``` ### Options ``` - -h, --help help for docIDs + -h, --help help for docIDs + -i, --identity string Identity of the actor ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_get.md b/docs/cli/defradb_client_collection_get.md index c2aeac17b3..a73228829b 100644 --- a/docs/cli/defradb_client_collection_get.md +++ b/docs/cli/defradb_client_collection_get.md @@ -8,17 +8,21 @@ View document fields. Example: defradb client collection get --name User bae-123 + +Example: get a private document using an identity: + defradb client collection get -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User bae-123 ``` -defradb client collection get [--show-deleted] [flags] +defradb client collection get [-i --identity] [--show-deleted] [flags] ``` ### Options ``` - -h, --help help for get - --show-deleted Show deleted documents + -h, --help help for get + -i, --identity string Identity of the actor + --show-deleted Show deleted documents ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. 
Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_collection_patch.md b/docs/cli/defradb_client_collection_patch.md new file mode 100644 index 0000000000..a5d0179a41 --- /dev/null +++ b/docs/cli/defradb_client_collection_patch.md @@ -0,0 +1,63 @@ +## defradb client collection patch + +Patch existing collection descriptions + +### Synopsis + +Patch existing collection descriptions. + +Uses JSON Patch to modify collection descriptions. + +Example: patch from an argument string: + defradb client collection patch '[{ "op": "add", "path": "...", "value": {...} }]' + +Example: patch from file: + defradb client collection patch -p patch.json + +Example: patch from stdin: + cat patch.json | defradb client collection patch - + +To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network. + +``` +defradb client collection patch [patch] [flags] +``` + +### Options + +``` + -h, --help help for patch + -p, --patch-file string File to load a patch from +``` + +### Options inherited from parent commands + +``` + --allowed-origins stringArray List of origins to allow for CORS requests + --get-inactive Get inactive collections as well as active + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --name string Collection name + --no-p2p Disable the peer-to-peer network synchronization system + --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) + --peers stringArray List of peers to connect to + --privkeypath string Path to the private key for tls + --pubkeypath string Path to the public key for tls + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --schema string Collection schema Root + --store string Specify the datastore to use (supported: badger, memory) (default "badger") + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") + --valuelogfilesize int Specify the datastore value log file size (in bytes). In memory size will be 2*valuelogfilesize (default 1073741824) + --version string Collection version ID +``` + +### SEE ALSO + +* [defradb client collection](defradb_client_collection.md) - Interact with a collection. +
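As a sketch of the command above: assuming collection descriptions are addressed by collection ID (so '/1/Name' targets the name of collection 1) and that, per this PR, only the collection name may currently be mutated, a rename could look like:

  defradb client collection patch '[{ "op": "replace", "path": "/1/Name", "value": "Users" }]'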
diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index 1200cc5b3e..dd2d1864c3 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -6,29 +6,34 @@ Update documents by docID or filter. Update documents by docID or filter. -Example: update from string +Example: update from string: defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' -Example: update by filter +Example: update by filter: defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by docIDs +Example: update by docIDs: defradb client collection update --name User \ --docID bae-123,bae-456 --updater '{ "verified": true }' + +Example: update private docIDs, with identity: + defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User \ + --docID bae-123,bae-456 --updater '{ "verified": true }' ``` -defradb client collection update [--filter <filter> --docID <docID> --updater <updater>] [flags] +defradb client collection update [-i --identity] [--filter <filter> --docID <docID> --updater <updater>] [flags] ``` ### Options ``` - --docID strings Document ID - --filter string Document filter - -h, --help help for update - --updater string Document updater + --docID strings Document ID + --filter string Document filter + -h, --help help for update + -i, --identity string Identity of the actor + --updater string Document updater ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --name string Collection name --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/cli/defradb_client_dump.md b/docs/cli/defradb_client_dump.md index bc00e292b9..3eff3e397e 100644 --- a/docs/cli/defradb_client_dump.md +++ b/docs/cli/defradb_client_dump.md @@ -16,11 +16,12 @@ defradb client dump [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index.md b/docs/cli/defradb_client_index.md index 0dab1de7fe..22e136c11c 100644 --- a/docs/cli/defradb_client_index.md +++ b/docs/cli/defradb_client_index.md @@ -16,11 +16,12 @@ Manage (create, drop, or list) collection indexes on a DefraDB node. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index cbdbbe1d50..9cecd2d0ff 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -33,11 +33,12 @@ defradb client index create -c --collection --fields [-n - ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_drop.md b/docs/cli/defradb_client_index_drop.md index bb9e6ec30a..9659958a28 100644 --- a/docs/cli/defradb_client_index_drop.md +++ b/docs/cli/defradb_client_index_drop.md @@ -25,11 +25,12 @@ defradb client index drop -c --collection -n --name [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_index_list.md b/docs/cli/defradb_client_index_list.md index a2d7ca8dd0..ee00938d6e 100644 --- a/docs/cli/defradb_client_index_list.md +++ b/docs/cli/defradb_client_index_list.md @@ -27,11 +27,12 @@ defradb client index list [-c --collection ] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p.md b/docs/cli/defradb_client_p2p.md index 171e2ab661..020506b92f 100644 --- a/docs/cli/defradb_client_p2p.md +++ b/docs/cli/defradb_client_p2p.md @@ -16,11 +16,12 @@ Interact with the DefraDB P2P system ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection.md b/docs/cli/defradb_client_p2p_collection.md index 11ace67212..873362f041 100644 --- a/docs/cli/defradb_client_p2p_collection.md +++ b/docs/cli/defradb_client_p2p_collection.md @@ -17,11 +17,12 @@ The selected collections synchronize their events on the pubsub network. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_add.md b/docs/cli/defradb_client_p2p_collection_add.md index c54f235a60..6fa00a5673 100644 --- a/docs/cli/defradb_client_p2p_collection_add.md +++ b/docs/cli/defradb_client_p2p_collection_add.md @@ -28,11 +28,12 @@ defradb client p2p collection add [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_getall.md b/docs/cli/defradb_client_p2p_collection_getall.md index 07c536d716..e2946022cb 100644 --- a/docs/cli/defradb_client_p2p_collection_getall.md +++ b/docs/cli/defradb_client_p2p_collection_getall.md @@ -21,11 +21,12 @@ defradb client p2p collection getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_collection_remove.md b/docs/cli/defradb_client_p2p_collection_remove.md index 5a8eb969b6..da09bdf70f 100644 --- a/docs/cli/defradb_client_p2p_collection_remove.md +++ b/docs/cli/defradb_client_p2p_collection_remove.md @@ -28,11 +28,12 @@ defradb client p2p collection remove [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_info.md b/docs/cli/defradb_client_p2p_info.md index 27fdf7cb9b..809d84fb6b 100644 --- a/docs/cli/defradb_client_p2p_info.md +++ b/docs/cli/defradb_client_p2p_info.md @@ -20,11 +20,12 @@ defradb client p2p info [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md index 725845a726..8b5dc88fdb 100644 --- a/docs/cli/defradb_client_p2p_replicator.md +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -17,11 +17,12 @@ A replicator replicates one or all collection(s) from one node to another. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md index ef89979be6..c5fded6a5b 100644 --- a/docs/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -26,11 +26,12 @@ defradb client p2p replicator delete [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md index 4d33b5243f..9f983de9fa 100644 --- a/docs/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -25,11 +25,12 @@ defradb client p2p replicator getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md index 55654ded0f..0f3446c87b 100644 --- a/docs/cli/defradb_client_p2p_replicator_set.md +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -26,11 +26,12 @@ defradb client p2p replicator set [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_query.md b/docs/cli/defradb_client_query.md index b23bf50553..cd320e9de6 100644 --- a/docs/cli/defradb_client_query.md +++ b/docs/cli/defradb_client_query.md @@ -12,6 +12,9 @@ A query request can be sent as a single argument. 
Example command: Do a query request from a file by using the '-f' flag. Example command: defradb client query -f request.graphql +Do a query request from a file with an identity. Example command: + defradb client query -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j -f request.graphql + Or it can be sent via stdin by using the '-' special syntax. Example command: cat request.graphql | defradb client query - @@ -21,25 +24,27 @@ with the database more conveniently. To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network. ``` -defradb client query [query request] [flags] +defradb client query [-i --identity] [request] [flags] ``` ### Options ``` - -f, --file string File containing the query request - -h, --help help for query + -f, --file string File containing the query request + -h, --help help for query + -i, --identity string Identity of the actor ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index d37251c8db..2e5a7db88c 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -16,11 +16,12 @@ Make changes, updates, or look for existing schema types. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_add.md b/docs/cli/defradb_client_schema_add.md index e0ad675241..dc72a6a354 100644 --- a/docs/cli/defradb_client_schema_add.md +++ b/docs/cli/defradb_client_schema_add.md @@ -6,6 +6,11 @@ Add new schema Add new schema. +A schema object with a '@policy(id:".." resource: "..")' directive linked to it will only be accepted if: + - ACP is available (i.e. ACP is not disabled). + - The specified resource adheres to the Document Access Control DPI Rules. + - Learn more about [ACP & DPI Rules](/acp/README.md) + Example: add from an argument string: defradb client schema add 'type Foo { ... }' @@ -32,11 +37,12 @@ defradb client schema add [schema] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171])
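As a sketch of how the '@policy' note above plays out: assuming 'defradb client acp policy add' has returned a policy id (the '<policyID>' placeholder below) for a policy that defines a matching 'users' resource, a schema could link it like so:

  defradb client schema add 'type Users @policy(id: "<policyID>", resource: "users") { name: String }'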
diff --git a/docs/cli/defradb_client_schema_describe.md b/docs/cli/defradb_client_schema_describe.md index cd79cce3c1..3ab0c1dda8 100644 --- a/docs/cli/defradb_client_schema_describe.md +++ b/docs/cli/defradb_client_schema_describe.md @@ -36,11 +36,12 @@ defradb client schema describe [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration.md b/docs/cli/defradb_client_schema_migration.md index b49420401c..2ee26c8521 100644 --- a/docs/cli/defradb_client_schema_migration.md +++ b/docs/cli/defradb_client_schema_migration.md @@ -16,11 +16,12 @@ Set or look up existing schema migrations on a DefraDB node. ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_down.md b/docs/cli/defradb_client_schema_migration_down.md index 6172bf09b1..e5541396f6 100644 --- a/docs/cli/defradb_client_schema_migration_down.md +++ b/docs/cli/defradb_client_schema_migration_down.md @@ -33,11 +33,12 @@ defradb client schema migration down --collection <collectionID> [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_get.md b/docs/cli/defradb_client_schema_migration_get.md deleted file mode 100644 index 20ed8edb91..0000000000 --- a/docs/cli/defradb_client_schema_migration_get.md +++ /dev/null @@ -1,41 +0,0 @@ -## defradb client schema migration get - -Gets the schema migrations within DefraDB - -### Synopsis - -Gets the schema migrations within the local DefraDB node. - -Example: - defradb client schema migration get' - -Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network. - -``` -defradb client schema migration get [flags] -``` - -### Options - -``` - -h, --help help for get -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance - diff --git a/docs/cli/defradb_client_schema_migration_reload.md b/docs/cli/defradb_client_schema_migration_reload.md index 01051e419a..ef89b749e8 100644 --- a/docs/cli/defradb_client_schema_migration_reload.md +++ b/docs/cli/defradb_client_schema_migration_reload.md @@ -20,11 +20,12 @@ defradb client schema migration reload [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... 
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_set-registry.md b/docs/cli/defradb_client_schema_migration_set-registry.md index 8e80aa132d..2eae5aba48 100644 --- a/docs/cli/defradb_client_schema_migration_set-registry.md +++ b/docs/cli/defradb_client_schema_migration_set-registry.md @@ -26,11 +26,12 @@ defradb client schema migration set-registry [collectionID] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_set.md b/docs/cli/defradb_client_schema_migration_set.md index 9e6bcfcfc4..1a5f923218 100644 --- a/docs/cli/defradb_client_schema_migration_set.md +++ b/docs/cli/defradb_client_schema_migration_set.md @@ -33,11 +33,12 @@ defradb client schema migration set [src] [dst] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_migration_up.md b/docs/cli/defradb_client_schema_migration_up.md index bcd28453cf..62fd063cd4 100644 --- a/docs/cli/defradb_client_schema_migration_up.md +++ b/docs/cli/defradb_client_schema_migration_up.md @@ -33,11 +33,12 @@ defradb client schema migration up --collection [flag ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_patch.md b/docs/cli/defradb_client_schema_patch.md index f24670b945..393786eb05 100644 --- a/docs/cli/defradb_client_schema_patch.md +++ b/docs/cli/defradb_client_schema_patch.md @@ -12,7 +12,7 @@ Example: patch from an argument string: defradb client schema patch '[{ "op": "add", "path": "...", "value": {...} }]' '{"lenses": [...' Example: patch from file: - defradb client schema patch -f patch.json + defradb client schema patch -p patch.json Example: patch from stdin: cat patch.json | defradb client schema patch - @@ -36,11 +36,12 @@ defradb client schema patch [schema] [migration] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_set-active.md b/docs/cli/defradb_client_schema_set-active.md index ff94ff88fe..77ec843f9c 100644 --- a/docs/cli/defradb_client_schema_set-active.md +++ b/docs/cli/defradb_client_schema_set-active.md @@ -21,11 +21,12 @@ defradb client schema set-active [versionID] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_schema_set-default.md b/docs/cli/defradb_client_schema_set-default.md deleted file mode 100644 index 0698b0e6d5..0000000000 --- a/docs/cli/defradb_client_schema_set-default.md +++ /dev/null @@ -1,36 +0,0 @@ -## defradb client schema set-default - -Set the default schema version - -### Synopsis - -Set the default schema version - -``` -defradb client schema set-default [versionID] [flags] -``` - -### Options - -``` - -h, --help help for set-default -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger <name>,level=<level>,output=<output>,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node - diff --git a/docs/cli/defradb_client_tx.md b/docs/cli/defradb_client_tx.md index 65f7740419..f3007f574e 100644 --- a/docs/cli/defradb_client_tx.md +++ b/docs/cli/defradb_client_tx.md @@ -16,11 +16,12 @@ Create, commit, and discard DefraDB transactions ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use.
Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_commit.md b/docs/cli/defradb_client_tx_commit.md index 621459e134..e9ae2e529d 100644 --- a/docs/cli/defradb_client_tx_commit.md +++ b/docs/cli/defradb_client_tx_commit.md @@ -20,11 +20,12 @@ defradb client tx commit [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_create.md b/docs/cli/defradb_client_tx_create.md index cf695da6c7..a2c45a9b44 100644 --- a/docs/cli/defradb_client_tx_create.md +++ b/docs/cli/defradb_client_tx_create.md @@ -22,11 +22,12 @@ defradb client tx create [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_tx_discard.md b/docs/cli/defradb_client_tx_discard.md index 7340bedf2a..2b1b4badb8 100644 --- a/docs/cli/defradb_client_tx_discard.md +++ b/docs/cli/defradb_client_tx_discard.md @@ -20,11 +20,12 @@ defradb client tx discard [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_view.md b/docs/cli/defradb_client_view.md index 9b93884430..8b8f47e8bc 100644 --- a/docs/cli/defradb_client_view.md +++ b/docs/cli/defradb_client_view.md @@ -16,11 +16,12 @@ Manage (add) views within a running DefraDB instance ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_client_view_add.md b/docs/cli/defradb_client_view_add.md index cdbab25a51..e522b86f1b 100644 --- a/docs/cli/defradb_client_view_add.md +++ b/docs/cli/defradb_client_view_add.md @@ -26,11 +26,12 @@ defradb client view add [query] [sdl] [transform] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_init.md b/docs/cli/defradb_init.md deleted file mode 100644 index f8d69f5794..0000000000 --- a/docs/cli/defradb_init.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb init - -Initialize DefraDB's root directory and configuration file - -### Synopsis - -Initialize a directory for configuration and data at the given path. -Passed flags will be persisted in the stored configuration. - -``` -defradb init [flags] -``` - -### Options - -``` - -h, --help help for init - --reinitialize Reinitialize the configuration file -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger <name>,level=<level>,output=<output>,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb](defradb.md) - DefraDB Edge Database - diff --git a/docs/cli/defradb_server-dump.md b/docs/cli/defradb_server-dump.md index 2b590da6fe..ff58487c65 100644 --- a/docs/cli/defradb_server-dump.md +++ b/docs/cli/defradb_server-dump.md @@ -16,11 +16,12 @@ defradb server-dump [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use.
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_start.md b/docs/cli/defradb_start.md index 2591f9bc06..4a4edeaf48 100644 --- a/docs/cli/defradb_start.md +++ b/docs/cli/defradb_start.md @@ -20,11 +20,12 @@ defradb start [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/docs/cli/defradb_version.md b/docs/cli/defradb_version.md index ce43eb148c..810d0dc477 100644 --- a/docs/cli/defradb_version.md +++ b/docs/cli/defradb_version.md @@ -18,11 +18,12 @@ defradb version [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests - --logformat string Log format to use. Options are csv, json (default "csv") - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) diff --git a/examples/dpi_policy/user_dpi_policy.json b/examples/dpi_policy/user_dpi_policy.json new file mode 100644 index 0000000000..74028d8ee6 --- /dev/null +++ b/examples/dpi_policy/user_dpi_policy.json @@ -0,0 +1,30 @@ +{ + "description": "A Valid Defra Policy Interface (DPI)", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner + reader" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + }, + "reader": { + "types": [ + "actor" + ] + } + } + } + } +} diff --git a/examples/dpi_policy/user_dpi_policy.yml b/examples/dpi_policy/user_dpi_policy.yml new file mode 100644 index 0000000000..fafae06957 --- /dev/null +++ b/examples/dpi_policy/user_dpi_policy.yml @@ -0,0 +1,29 @@ +# The below policy contains an example with valid DPI compliant resource that can be linked to a collection +# object during the schema add command to have access control enabled for documents of that collection. +# +# This policy is specified to the Users object example in: `examples/schema/permissioned/users.graphql` +# +# The same policy example in json format is in: `examples/dpi_policy/user_dpi_policy.json` +# +# Learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +description: A Valid DefraDB Policy Interface (DPI) + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor diff --git a/examples/schema/permissioned/book.graphql b/examples/schema/permissioned/book.graphql new file mode 100644 index 0000000000..96bdcbb877 --- /dev/null +++ b/examples/schema/permissioned/book.graphql @@ -0,0 +1,14 @@ +# The below sdl contains an example `Book` object with an example source hub policy id and resource name. +# +# The policy id must exist in sourcehub (for remote acp) or local acp first, and the resource name +# must exist on the corresponding policy to the policy id. +# +# Note: The resource name does not need to be similar to the collection name. +# +# The policy must be a valid DPI, learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +type Book @policy(id:"7dc51aabc0248cf106265c902bf56faa1989ec41a6bbd36b6e438cfade7aee4a", resource:"book") { + name: String + rating: Float +} + diff --git a/examples/schema/permissioned/users.graphql b/examples/schema/permissioned/users.graphql new file mode 100644 index 0000000000..771e6da2c9 --- /dev/null +++ b/examples/schema/permissioned/users.graphql @@ -0,0 +1,18 @@ +# The below sdl contains an example `Users` object with an example source hub policy id and resource name. +# +# The policy id must exist in sourcehub (for remote acp) or local acp first, and the resource name +# must exist on the corresponding policy to the policy id. +# +# The resource name does not need to be similar to the collection name. 
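The renamed logging flags above are shared by every `defradb` command. As a minimal sketch of their use: the flag names and the override value format below come from the updated help text in this patch, while the logger names in the override value (`net`, `http`) are illustrative assumptions, not names confirmed by this change.

```
# Sketch: JSON logs on stderr at info level, with two per-logger
# overrides using the documented format <name>,<key>=<val>,...;<name>,...
# The logger names "net" and "http" are assumed for illustration only.
defradb start \
  --log-level info \
  --log-format json \
  --log-output stderr \
  --log-overrides "net,level=debug;http,level=error"
```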
+# +# The linked policy id and resource correspond to the policy at: `examples/dpi_policy/user_dpi_policy.yml` +# +# The policy must be a valid DPI, learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +type Users @policy( + id: "24ab8cba6d6f0bcfe4d2712c7d95c09dd1b8076ea5a8896476413fd6c891c18c", + resource: "users" +) { + name: String + age: Int +} diff --git a/go.mod b/go.mod index eff296f3e3..35047db29f 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.21.3 require ( github.com/bits-and-blooms/bitset v1.13.0 github.com/bxcodec/faker v2.0.1+incompatible + github.com/cosmos/gogoproto v1.4.11 github.com/evanphx/json-patch/v5 v5.9.0 github.com/fxamacker/cbor/v2 v2.6.0 github.com/getkin/kin-openapi v0.123.0 @@ -34,6 +35,7 @@ require ( github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd github.com/sourcenetwork/immutable v0.3.0 + github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724 github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.18.2 @@ -45,53 +47,131 @@ require ( go.opentelemetry.io/otel/metric v1.24.0 go.opentelemetry.io/otel/sdk/metric v1.24.0 go.uber.org/zap v1.27.0 - golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc + golang.org/x/exp v0.0.0-20240213143201-ec583247a57a google.golang.org/grpc v1.62.1 - google.golang.org/protobuf v1.33.0 + google.golang.org/protobuf v1.32.0 ) require ( + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 // indirect + cosmossdk.io/api v0.7.3 // indirect + cosmossdk.io/collections v0.4.0 // indirect + cosmossdk.io/core v0.11.0 // indirect + cosmossdk.io/depinject v1.0.0-alpha.4 // indirect + cosmossdk.io/errors v1.0.1 // indirect + cosmossdk.io/log v1.3.1 // indirect + cosmossdk.io/math v1.2.0 // indirect + cosmossdk.io/store v1.0.2 // indirect + cosmossdk.io/x/tx v0.13.0 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/DataDog/datadog-go v3.2.0+incompatible // indirect + github.com/DataDog/zstd v1.5.5 // indirect github.com/Jorropo/jsync v1.0.1 // indirect + github.com/NathanBaulch/protoc-gen-cobra v1.2.1 // indirect + github.com/awalterschulze/gographviz v2.0.3+incompatible // indirect github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd v0.22.1 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce // indirect github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 // indirect + github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cockroachdb/errors v1.11.1 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/pebble v1.1.0 // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect + github.com/cometbft/cometbft v0.38.5 // indirect + github.com/cometbft/cometbft-db v0.9.1 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + 
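Taken together, the example files above sketch the intended ACP flow: register a DPI-compliant policy, then add a schema whose `@policy` directive references the resulting policy id and one of the policy's resources. A hedged walkthrough follows; the `acp policy add` command shape and its `-f` flag are assumptions for illustration and are not defined by this patch, while the file paths and the `@policy` directive come from the examples above.

```
# Assumed workflow (command shapes unverified against this patch):
# 1. Register the DPI-compliant policy; the resulting policy id is what
#    the @policy(id: ...) directive in the SDL must reference.
defradb client acp policy add -f examples/dpi_policy/user_dpi_policy.yml

# 2. Add the permissioned schema, linking the "users" resource defined
#    in that policy to the Users collection.
defradb client schema add -f examples/schema/permissioned/users.graphql
```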
github.com/cosmos/cosmos-db v1.0.0 // indirect + github.com/cosmos/cosmos-proto v1.0.0-beta.4 // indirect + github.com/cosmos/cosmos-sdk v0.50.4 // indirect + github.com/cosmos/go-bip39 v1.0.0 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/gorocksdb v1.2.0 // indirect + github.com/cosmos/iavl v1.0.1 // indirect + github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect github.com/cskr/pubsub v1.0.2 // indirect + github.com/danieljoos/wincred v1.2.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c // indirect github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.6.0 // indirect github.com/elastic/gosigar v0.14.2 // indirect + github.com/emicklei/dot v1.6.1 // indirect + github.com/fatih/color v1.15.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect github.com/flynn/noise v1.0.1 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/getsentry/sentry-go v0.27.0 // indirect + github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 // indirect + github.com/go-kit/kit v0.12.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.6.0 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.20.2 // indirect github.com/go-openapi/swag v0.22.8 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect + github.com/gogo/googleapis v1.4.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.2.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect + github.com/google/go-cmp v0.6.0 // indirect github.com/google/gopacket v1.1.19 // indirect + github.com/google/orderedcode v0.0.1 // indirect github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 // indirect github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect github.com/gorilla/websocket v1.5.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect + github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-hclog v1.5.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-metrics v0.5.2 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect + 
github.com/hashicorp/go-plugin v1.5.2 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/hashicorp/golang-lru/arc/v2 v2.0.5 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hashicorp/yamux v0.1.1 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect github.com/huin/goupnp v1.3.0 // indirect + github.com/hyperledger/aries-framework-go v0.3.2 // indirect + github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3 // indirect + github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3 // indirect + github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347 // indirect + github.com/hyperledger/aries-framework-go/spi v0.0.0-20230427134832-0c9969493bd3 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/invopop/yaml v0.2.0 // indirect github.com/ipfs/bbloom v0.0.4 // indirect @@ -107,10 +187,15 @@ require ( github.com/ipld/go-ipld-prime v0.21.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/klauspost/compress v1.17.4 // indirect + github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 // indirect + github.com/klauspost/compress v1.17.6 // indirect github.com/klauspost/cpuid/v2 v2.2.6 // indirect github.com/koron/go-ssdp v0.0.4 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lib/pq v1.10.7 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect @@ -122,18 +207,22 @@ require ( github.com/libp2p/go-netroute v0.2.1 // indirect github.com/libp2p/go-reuseport v0.4.0 // indirect github.com/libp2p/go-yamux/v4 v4.0.1 // indirect + github.com/linxGnu/grocksdb v1.8.12 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect + github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect github.com/miekg/dns v1.1.57 // indirect github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect + github.com/minio/highwayhash v1.0.2 // indirect github.com/minio/sha256-simd v1.0.1 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/mr-tron/base58 v1.2.0 // indirect + github.com/mtibben/percent v0.2.1 // indirect github.com/multiformats/go-base32 v0.1.0 // indirect github.com/multiformats/go-base36 v0.2.0 // indirect github.com/multiformats/go-multiaddr-dns v0.3.1 // indirect @@ -141,38 +230,58 @@ require ( github.com/multiformats/go-multicodec v0.9.0 // indirect github.com/multiformats/go-multistream v0.5.0 // indirect github.com/multiformats/go-varint v0.0.7 // indirect - github.com/onsi/ginkgo v1.16.5 // indirect + 
github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect + github.com/oklog/run v1.1.0 // indirect github.com/onsi/ginkgo/v2 v2.13.2 // indirect github.com/opencontainers/runtime-spec v1.1.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect + github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc // indirect + github.com/piprate/json-gold v0.5.0 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/polydawn/refmt v0.89.0 // indirect + github.com/pquerna/cachecontrol v0.1.0 // indirect github.com/prometheus/client_golang v1.18.0 // indirect - github.com/prometheus/client_model v0.5.0 // indirect - github.com/prometheus/common v0.45.0 // indirect + github.com/prometheus/client_model v0.6.0 // indirect + github.com/prometheus/common v0.47.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect github.com/quic-go/qpack v0.4.0 // indirect github.com/quic-go/qtls-go1-20 v0.4.1 // indirect github.com/quic-go/quic-go v0.40.1 // indirect github.com/quic-go/webtransport-go v0.6.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect + github.com/rs/cors v1.10.1 // indirect + github.com/rs/zerolog v1.32.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect github.com/sourcegraph/conc v0.3.0 // indirect + github.com/sourcenetwork/raccoondb v0.2.0 // indirect + github.com/sourcenetwork/zanzi v0.3.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect github.com/spf13/afero v1.11.0 // indirect github.com/spf13/cast v1.6.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect - github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tendermint/tm-db v0.6.7 // indirect + github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/zondax/hid v0.9.2 // indirect + github.com/zondax/ledger-go v0.14.3 // indirect + go.etcd.io/bbolt v1.3.8 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/otel v1.24.0 // indirect go.opentelemetry.io/otel/sdk v1.24.0 // indirect @@ -182,15 +291,22 @@ require ( go.uber.org/mock v0.4.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/crypto v0.19.0 // indirect - golang.org/x/mod v0.14.0 // indirect + golang.org/x/mod v0.15.0 // indirect golang.org/x/net v0.21.0 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.17.0 // indirect + golang.org/x/term 
v0.17.0 // indirect golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.16.1 // indirect + golang.org/x/tools v0.18.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect + google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect + gotest.tools/v3 v3.5.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect + nhooyr.io/websocket v1.8.7 // indirect + pgregory.net/rapid v1.1.0 // indirect + sigs.k8s.io/yaml v1.4.0 // indirect ) diff --git a/go.sum b/go.sum index 22c2ef750c..925f4e9590 100644 --- a/go.sum +++ b/go.sum @@ -1,61 +1,228 @@ +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 h1:tdpHgTbmbvEIARu+bixzmleMi14+3imnpoFXz+Qzjp4= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1/go.mod h1:xafc+XIsTxTy76GJQ1TKgvJWsSugFBqMaN27WhUblew= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.37.0/go.mod h1:TS1dMSSfndXH133OKGwekG838Om/cQT0BUHV3HcBgoo= +cosmossdk.io/api v0.7.3 h1:V815i8YOwOAQa1rLCsSMjVG5Gnzs02JLq+l7ks8s1jk= +cosmossdk.io/api v0.7.3/go.mod h1:IcxpYS5fMemZGqyYtErK7OqvdM0C8kdW3dq8Q/XIG38= +cosmossdk.io/collections v0.4.0 h1:PFmwj2W8szgpD5nOd8GWH6AbYNi1f2J6akWXJ7P5t9s= +cosmossdk.io/collections v0.4.0/go.mod h1:oa5lUING2dP+gdDquow+QjlF45eL1t4TJDypgGd+tv0= +cosmossdk.io/core v0.11.0 h1:vtIafqUi+1ZNAE/oxLOQQ7Oek2n4S48SWLG8h/+wdbo= +cosmossdk.io/core v0.11.0/go.mod h1:LaTtayWBSoacF5xNzoF8tmLhehqlA9z1SWiPuNC6X1w= +cosmossdk.io/depinject v1.0.0-alpha.4 h1:PLNp8ZYAMPTUKyG9IK2hsbciDWqna2z1Wsl98okJopc= +cosmossdk.io/depinject v1.0.0-alpha.4/go.mod h1:HeDk7IkR5ckZ3lMGs/o91AVUc7E596vMaOmslGFM3yU= +cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0= +cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U= +cosmossdk.io/log v1.3.1 h1:UZx8nWIkfbbNEWusZqzAx3ZGvu54TZacWib3EzUYmGI= +cosmossdk.io/log v1.3.1/go.mod h1:2/dIomt8mKdk6vl3OWJcPk2be3pGOS8OQaLUM/3/tCM= +cosmossdk.io/math v1.2.0 h1:8gudhTkkD3NxOP2YyyJIYYmt6dQ55ZfJkDOaxXpy7Ig= +cosmossdk.io/math v1.2.0/go.mod h1:l2Gnda87F0su8a/7FEKJfFdJrM0JZRXQaohlgJeyQh0= +cosmossdk.io/store v1.0.2 h1:lSg5BTvJBHUDwswNNyeh4K/CbqiHER73VU4nDNb8uk0= +cosmossdk.io/store v1.0.2/go.mod h1:EFtENTqVTuWwitGW1VwaBct+yDagk7oG/axBMPH+FXs= +cosmossdk.io/x/tx v0.13.0 h1:8lzyOh3zONPpZv2uTcUmsv0WTXy6T1/aCVDCqShmpzU= +cosmossdk.io/x/tx v0.13.0/go.mod h1:CpNQtmoqbXa33/DVxWQNx5Dcnbkv2xGUhL7tYQ5wUsY= dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU= dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4= dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU= +filippo.io/edwards25519 v1.0.0 
h1:0wAIcmJUqRdI8IJ/3eGi5/HwXZWPujYXXlkrQogz0Ek= +filippo.io/edwards25519 v1.0.0/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns= git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= +github.com/99designs/keyring v1.2.1 h1:tYLp1ULvO7i3fI5vE21ReQuj99QFSs7lGm0xWyJo87o= +github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= -github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= +github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= github.com/Jorropo/jsync v1.0.1 h1:6HgRolFZnsdfzRUj+ImB9og1JYOxQoReSywkHOGSaUU= github.com/Jorropo/jsync v1.0.1/go.mod h1:jCOZj3vrBCri3bSU3ErUYvevKlnbssrXeCivybS5ABQ= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/NathanBaulch/protoc-gen-cobra v1.2.1 h1:BOqX9glwicbqDJDGndMnhHhx8psGTSjGdZzRDY1a7A8= +github.com/NathanBaulch/protoc-gen-cobra v1.2.1/go.mod h1:ZLPLEPQgV3jP3a7IEp+xxYPk8tF4lhY9ViV0hn6K3iA= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/VividCortex/gohistogram v1.0.0 h1:6+hBz+qvs0JOrrNhhmR7lFxo5sINxBCGXrdtl/UvroE= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/adlio/schema v1.3.3 h1:oBJn8I02PyTB466pZO1UZEn1TV5XLlifBSyMrmHl/1I= +github.com/adlio/schema v1.3.3/go.mod h1:1EsRssiv9/Ce2CMzq5DoL7RiMshhuigQxrR4DMV9fHg= +github.com/aead/siphash v1.0.1/go.mod 
h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs= github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E= +github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/benbjohnson/clock v1.3.5 h1:VvXlSJBzZpA/zum6Sj74hxwYI2DIxRWuNIoXAzHZz5o= github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s= +github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bits-and-blooms/bitset 
v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE= github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= +github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btcd v0.22.1 h1:CnwP9LM/M9xuRrGSCGeMVs9iv09uMqwsVX7EeIpgV2c= +github.com/btcsuite/btcd v0.22.1/go.mod h1:wqgTSL29+50LRkmOVknEdmt8ZojIzhuWvgu/iptuN7Y= +github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= +github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= +github.com/btcsuite/btcd/btcutil v1.1.3 h1:xfbtw8lwpp0G6NwSHb+UE67ryTFHJAiNuipusjXSohQ= +github.com/btcsuite/btcd/btcutil v1.1.3/go.mod h1:UR7dsSJzJUfMmFiiLlIrMq1lS9jh9EdCV7FStZSnpi0= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2 h1:KdUfX2zKommPRa+PD0sWZUyXe9w277ABlgELO7H04IM= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= +github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= +github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= +github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce h1:YtWJF7RHm2pYCvA5t0RPmAaLUhREsKuKd+SLhxFbFeQ= +github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce/go.mod h1:0DVlHczLPewLcPGEIeUEzfOJhqGPQ0mJJRDBtD307+o= +github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= +github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= +github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= +github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= +github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= +github.com/bufbuild/protocompile v0.6.0 h1:Uu7WiSQ6Yj9DbkdnOe7U4mNKp58y9WDMKDn28/ZlunY= +github.com/bufbuild/protocompile v0.6.0/go.mod h1:YNP35qEYoYGme7QMtz5SBCoN4kL4g12jTtjuzRNdjpE= github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM= github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 h1:4R2MpSPPbtSxqdsOTvsMn1pnwdEhzbDGMao6LUUSLv4= github.com/bytecodealliance/wasmtime-go/v15 v15.0.0/go.mod h1:m6vB/SsM+pnJkVHmO1wzHYUeYtciltTKuxuvkR8pYcY= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= 
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E= +github.com/cockroachdb/apd/v2 v2.0.2/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= +github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= +github.com/cockroachdb/errors v1.11.1 h1:xSEW75zKaKCWzR3OfxXUxgrk/NtT4G1MiOv5lWZazG8= +github.com/cockroachdb/errors v1.11.1/go.mod h1:8MUxA3Gi6b25tYlFEBGLf+D8aISL+M4MIpiWMSNRfxw= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/pebble v1.1.0 h1:pcFh8CdCIt2kmEpK0OIatq67Ln9uGDYY3d5XnE0LJG4= +github.com/cockroachdb/pebble v1.1.0/go.mod h1:sEHm5NOXxyiAoKWhoFxT8xMgd/f3RA6qUqQ1BXKrh2E= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod 
h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/cometbft/cometbft v0.38.5 h1:4lOcK5VTPrfbLOhNHmPYe6c7eDXHtBdMCQuKbAfFJdU= +github.com/cometbft/cometbft v0.38.5/go.mod h1:0tqKin+KQs8zDwzYD8rPHzSBIDNPuB4NrwwGDNb/hUg= +github.com/cometbft/cometbft-db v0.9.1 h1:MIhVX5ja5bXNHF8EYrThkG9F7r9kSfv8BX4LWaxWJ4M= +github.com/cometbft/cometbft-db v0.9.1/go.mod h1:iliyWaoV0mRwBJoizElCwwRA9Tf7jZJOURcRZF9m60U= github.com/containerd/cgroups v0.0.0-20201119153540-4cbc285b3327/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM= github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= +github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk= +github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis= +github.com/cosmos/cosmos-db v1.0.0 h1:EVcQZ+qYag7W6uorBKFPvX6gRjw6Uq2hIh4hCWjuQ0E= +github.com/cosmos/cosmos-db v1.0.0/go.mod h1:iBvi1TtqaedwLdcrZVYRSSCb6eSy61NLj4UNmdIgs0U= +github.com/cosmos/cosmos-proto v1.0.0-beta.4 h1:aEL7tU/rLOmxZQ9z4i7mzxcLbSCY48OdY7lIWTLG7oU= +github.com/cosmos/cosmos-proto v1.0.0-beta.4/go.mod h1:oeB+FyVzG3XrQJbJng0EnV8Vljfk9XvTIpGILNU/9Co= +github.com/cosmos/cosmos-sdk v0.50.4 h1:hQT5/+Z1XXNF7skaPq0i247Ts2dzzqg/j2WO/BPHSto= +github.com/cosmos/cosmos-sdk v0.50.4/go.mod h1:UbShFs6P8Ly29xxJvkNGaNaL/UGj5a686NRtb1Cqra0= +github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY= +github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw= +github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= +github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ4GUkT+tbFI= +github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU= +github.com/cosmos/gogoproto v1.4.11 h1:LZcMHrx4FjUgrqQSWeaGC1v/TeuVFqSLa43CC6aWR2g= +github.com/cosmos/gogoproto v1.4.11/go.mod h1:/g39Mh8m17X8Q/GDEs5zYTSNaNnInBSohtaxzQnYq1Y= 
+github.com/cosmos/gorocksdb v1.2.0 h1:d0l3jJG8M4hBouIZq0mDUHZ+zjOx044J3nGRskwTb4Y= +github.com/cosmos/gorocksdb v1.2.0/go.mod h1:aaKvKItm514hKfNJpUJXnnOWeBnk2GL4+Qw9NHizILw= +github.com/cosmos/iavl v1.0.1 h1:D+mYbcRO2wptYzOM1Hxl9cpmmHU1ZEt9T2Wv5nZTeUw= +github.com/cosmos/iavl v1.0.1/go.mod h1:8xIUkgVvwvVrBu81scdPty+/Dx9GqwHnAvXz4cwF7RY= +github.com/cosmos/ics23/go v0.10.0 h1:iXqLLgp2Lp+EdpIuwXTYIQU+AiHj9mOC2X9ab++bZDM= +github.com/cosmos/ics23/go v0.10.0/go.mod h1:ZfJSmng/TBNTBkFemHHHj5YY7VAU/MBU980F4VU1NG0= +github.com/cosmos/ledger-cosmos-go v0.13.3 h1:7ehuBGuyIytsXbd4MP43mLeoN2LTOEnk5nvue4rK+yM= +github.com/cosmos/ledger-cosmos-go v0.13.3/go.mod h1:HENcEP+VtahZFw38HZ3+LS3Iv5XV6svsnkk9vdJtLr8= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668 h1:ZFUue+PNxmHlu7pYv+IYMtqlaO/0VwaGEqKepZf9JpA= github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0= github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis= +github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= +github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= +github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= @@ -66,45 +233,91 @@ github.com/decred/dcrd/crypto/blake256 v1.0.1 h1:7PltbUIQB7u/FfZ39+DGa/ShuMyJ5il github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE= +github.com/dgraph-io/badger/v2 v2.2007.4 h1:TRWBQg8UrlUhaFdco01nO2uXwzKS7zd+HVdwV/GHc4o= +github.com/dgraph-io/badger/v2 v2.2007.4/go.mod h1:vSw/ax2qojzbN6eXHIx6KPKtCSHJN/Uz0X0VPruTIhk= github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g= github.com/dgraph-io/badger/v3 v3.2011.1/go.mod h1:0rLLrQpKVQAL0or/lBLMQznhr6dWWX7h5AKnmnqx268= 
+github.com/dgraph-io/ristretto v0.0.3-0.20200630154024-f66de99634de/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= +github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= github.com/elastic/gosigar v0.12.0/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs= github.com/elastic/gosigar v0.14.2 h1:Dg80n8cr90OZ7x+bAax/QjoW/XqTI11RmA79ZwIm9/4= github.com/elastic/gosigar v0.14.2/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs= +github.com/emicklei/dot v1.6.1 h1:ujpDlBkkwgWUY+qPId5IwapRW/xEoligRSYjioR6DFI= +github.com/emicklei/dot v1.6.1/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= 
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch/v5 v5.9.0 h1:kcBlZQbplgElYIlo/n1hJbls2z/1awpXxpRi0/FOJfg= github.com/evanphx/json-patch/v5 v5.9.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ= +github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c h1:8ISkoahWXwZR41ois5lSJBSVw4D0OV19Ht/JSTzvSv0= +github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c/go.mod h1:Yg+htXGokKKdzcwhuNDwVvN+uBxDGXJ7G/VN1d8fa64= +github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojtoVVWjGfOF9635RETekkoH6Cc9SX0A= +github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg= +github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4 h1:7HZCaLC5+BZpmbhCOZJ293Lz68O7PYrF2EzeiFMwCLk= +github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/flynn/noise v1.0.1 h1:vPp/jdQLXC6ppsXSj/pM3W1BIJ5FEHE2TulSJBpb43Y= github.com/flynn/noise v1.0.1/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk= github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fxamacker/cbor/v2 v2.6.0 h1:sU6J2usfADwWlYDAFhZBQ6TnLFBHxgesMrQfQgk1tWA= github.com/fxamacker/cbor/v2 v2.6.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8= github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= +github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps= +github.com/getsentry/sentry-go v0.27.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= 
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s= github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= @@ -113,6 +326,22 @@ github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vz github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 h1:w5li6eMV6NCHh1YVbKRM/gMCVtZ2w7mnwq78eNnHXQQ= +github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4= +github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4= +github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -122,34 +351,72 @@ github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbX github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs= github.com/go-openapi/swag v0.22.8 h1:/9RjDSQ0vbFR+NyjGMkFTsA1IA0fmhKSThmfGZjicbw= github.com/go-openapi/swag v0.22.8/go.mod h1:6QT22icPLEqAM/z/TChgb4WAveCHF92+2gF0CNjHpPI= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= 
+github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= +github.com/go-playground/validator/v10 v10.13.0 h1:cFRQdfaSMCOSfGCCLB20MHvuoHb/s5G8L5pu2ppK5AQ= +github.com/go-playground/validator/v10 v10.13.0/go.mod h1:dwu7+CG8/CtBiJFZDz4e+5Upb6OLw04gtBYw0mcG/z4= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/gobwas/ws v1.2.1 h1:F2aeBZrm2NDsc7vbovKrWSogd4wvfAxg0FQ89/iqOTk= +github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk= +github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M= github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/googleapis v1.4.1-0.20201022092350-68b0159b7869/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod 
h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.0 h1:uCdmnmatrKCgMBlM4rMuJZWOkPDqdbZPnrMXDY4gI68= github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.0/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -159,11 +426,17 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU= +github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -174,17 +447,29 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8= github.com/google/gopacket v1.1.19/go.mod h1:iJ8V8n6KS+z2U1A8pUwu8bW5SyEMkXJB8Yo/Vo+TKTo= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/orderedcode v0.0.1 h1:UzfcAexk9Vhv8+9pNOgRu41f16lHq725vPwnSeiG/Us= +github.com/google/orderedcode v0.0.1/go.mod h1:iVyU4/qPKHY5h/wSd6rZZCDcLJNxiWO6dvsYES2Sb20= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 h1:dHLYa5D8/Ta0aLR2XcPsrkpAgGeFs6thhMcQK0oQ0n8= github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= +github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 
h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -193,15 +478,62 @@ github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE0 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f h1:KMlcu9X58lhTA/KrfX8Bi1LQSO4pzoVjTiL3h4Jk+Zk= github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE= +github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= +github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0/go.mod h1:qmOFXW2epJhM0qSnUUYpldc7gVz2KMQwJ/QYCDIa7XU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod 
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= +github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-metrics v0.5.2 h1:ErEYO2f//CjKsUDw4SmLzelsK6L3ZmOAR/4P9iS7ruY= +github.com/hashicorp/go-metrics v0.5.2/go.mod h1:KEjodfebIOuBYSAe/bHTm+HChmKSxAOXPBieMLYozDE= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.5.2 h1:aWv8eimFqWlsEiMrYZdPYl+FdHaBJSN4AWwGWfT1G2Y= +github.com/hashicorp/go-plugin v1.5.2/go.mod h1:w1sAEES3g3PuV/RzUrgow20W2uErMly84hhD3um1WL4= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/golang-lru/arc/v2 v2.0.5 h1:l2zaLDubNhW4XO3LnliVj0GXO3+/CGNJAg1dcN2Fpfw= @@ -210,15 +542,47 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/yamux v0.1.1 
h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= +github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/hdevalence/ed25519consensus v0.1.0 h1:jtBwzzcHuTmFrQN6xQZn6CQEO/V9f7HsjsjeEZ6auqU= +github.com/hdevalence/ed25519consensus v0.1.0/go.mod h1:w3BHWjwJbFU29IRHL1Iqkw3sus+7FctEyM4RqDxYNzo= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/hsanjuan/ipfs-lite v1.8.1 h1:Rpd9bTXYgkmnt8M5QsZnWwtW6ebxAB7HlU/d0zE4BmA= github.com/hsanjuan/ipfs-lite v1.8.1/go.mod h1:oGCaHBi+I73UFjc6wPAQ75hr4FjJhoqy6YPZjtghDIc= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/skiplist v1.2.0 h1:gox56QD77HzSC0w+Ws3MH3iie755GBJU1OER3h5VsYw= +github.com/huandu/skiplist v1.2.0/go.mod h1:7v3iFjLcSAzO4fN5B8dvebvo/qsfumiLiDXMrPiHF9w= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= +github.com/hyperledger/aries-framework-go v0.3.2 h1:GsSUaSEW82cr5X8b3Qf90GAi37kmTKHqpPJLhar13X8= +github.com/hyperledger/aries-framework-go v0.3.2/go.mod h1:SorUysWEBw+uyXhY5RAtg2iyNkWTIIPM8+Slkt1Spno= +github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3 h1:PCbDSujjQ6oTEnAHgtThNmbS7SPAYEDBlKOnZFE+Ujw= +github.com/hyperledger/aries-framework-go/component/kmscrypto v0.0.0-20230427134832-0c9969493bd3/go.mod h1:aEk0vHBmZsAdDfXaI12Kg5ipZGiB3qNqgbPt/e/Hm2s= +github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3 h1:x5qFQraTX86z9GCwF28IxfnPm6QH5YgHaX+4x97Jwvw= +github.com/hyperledger/aries-framework-go/component/log v0.0.0-20230427134832-0c9969493bd3/go.mod h1:CvYs4l8X2NrrF93weLOu5RTOIJeVdoZITtjEflyuTyM= +github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347 h1:oPGUCpmnm7yxsVllcMQnHF3uc3hy4jfrSCh7nvzXA00= +github.com/hyperledger/aries-framework-go/component/models v0.0.0-20230501135648-a9a7ad029347/go.mod h1:nF8fHsYY+GZl74AFAQaKAhYWOOSaLVzW/TZ0Sq/6axI= +github.com/hyperledger/aries-framework-go/component/storageutil v0.0.0-20230427134832-0c9969493bd3 h1:JGYA9l5zTlvsvfnXT9hYPpCokAjmVKX0/r7njba7OX4= +github.com/hyperledger/aries-framework-go/component/storageutil v0.0.0-20230427134832-0c9969493bd3/go.mod h1:aSG2dWjYVzu2PVBtOqsYghaChA5+UUXnBbL+MfVceYQ= +github.com/hyperledger/aries-framework-go/spi v0.0.0-20230427134832-0c9969493bd3 h1:ytWmOQZIYQfVJ4msFvrqlp6d+ZLhT43wS8rgE2m+J1A= +github.com/hyperledger/aries-framework-go/spi v0.0.0-20230427134832-0c9969493bd3/go.mod h1:oryUyWb23l/a3tAP9KW+GBbfcfqp9tZD4y5hSkFrkqI= +github.com/hyperledger/ursa-wrapper-go v0.3.1 h1:Do+QrVNniY77YK2jTIcyWqj9rm/Yb5SScN0bqCjiibA= +github.com/hyperledger/ursa-wrapper-go v0.3.1/go.mod h1:nPSAuMasIzSVciQo22PedBk4Opph6bJ6ia3ms7BH/mk= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= +github.com/improbable-eng/grpc-web v0.15.0/go.mod h1:1sy9HKV4Jt9aEs9JSnkWlRJPuPtwNr0l57L4f878wP8= 
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= @@ -276,21 +640,48 @@ github.com/jbenet/go-temp-err-catcher v0.1.0/go.mod h1:0kJRvmDZXNMIiJirNPEYfhpPw github.com/jbenet/goprocess v0.1.4 h1:DRGOFReOMqqDNXwW70QkacFW0YN9QnwLV0Vqk+3oU0o= github.com/jbenet/goprocess v0.1.4/go.mod h1:5yspPrukOVuOLORacaBi858NqyClJPQxYZlqdZVfqY4= github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU= +github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jhump/protoreflect v1.15.3 h1:6SFRuqU45u9hIZPJAoZ8c28T3nK64BNdp9w6jFonzls= +github.com/jhump/protoreflect v1.15.3/go.mod h1:4ORHmSBmlCW8fh3xHmJMGyul1zNqZK4Elxc8qKP+p1k= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U= +github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 h1:kMJlf8z8wUcpyI+FQJIdGjAhfTww1y0AbQEv86bpVQI= +github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69/go.mod h1:tlkavyke+Ac7h8R3gZIjI5LKBcvMlSWnXNMgT3vZXo8= +github.com/kisielk/errcheck v1.1.0/go.mod 
h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= -github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= +github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/koron/go-ssdp v0.0.4 h1:1IDwrghSKYM7yLf7XCzbByg2sJ/JcNOZRXS2jczTwz0= github.com/koron/go-ssdp v0.0.4/go.mod h1:oDXq+E5IL5q0U8uSBcoAXzTzInwy5lEgC91HoKtbmZk= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -302,6 +693,11 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c h1:bG+mr4SqbYRU69L6CSvHDsKbRg5Q9vaN2T5g7qcrPdQ= github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c/go.mod h1:a4edl+KcOVk1Nj3EjG77htqg2/0Mmy3bSG0kl+FWVqQ= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg= github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38yPW7c= @@ -338,21 +734,39 @@ github.com/libp2p/go-yamux/v4 v4.0.1 h1:FfDR4S1wj6Bw2Pqbc8Uz7pCxeRBPbwsBbEdfwiCy github.com/libp2p/go-yamux/v4 v4.0.1/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q= github.com/libp2p/zeroconf/v2 v2.2.0/go.mod h1:fuJqLnUwZTshS3U/bMRJ3+ow/v9oid1n0DmyYyNO1Xs= +github.com/lightstep/lightstep-tracer-common/golang/gogo 
v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/linxGnu/grocksdb v1.8.12 h1:1/pCztQUOa3BX/1gR3jSZDoaKFpeHFvQ1XrqZpSvZVo= +github.com/linxGnu/grocksdb v1.8.12/go.mod h1:xZCIb5Muw+nhbDK4Y5UJuOrin5MceOuiXkVUR7vp4WY= github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= +github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd h1:br0buuQ854V8u83wA0rVZ8ttrq5CpaPZdvrK0LP2lOk= github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd/go.mod h1:QuCEs1Nt24+FYQEqAAncTDPJIuGs+LxK1MCiFL25pMU= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM= github.com/miekg/dns v1.1.57/go.mod 
h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk= @@ -363,20 +777,38 @@ github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b/go.mod h1:lxPUiZwKo github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc h1:PTfri+PuQmWDqERdnNMiD9ZejrlswWrCpBEZgWOiTrc= github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc/go.mod h1:cGKTAVKx4SxOuR/czcZ/E2RSJ3sfHs8FpHhQ5CWMf9s= github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= github.com/minio/sha256-simd v0.1.1-0.20190913151208-6de447530771/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM= github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= +github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/mr-tron/base58 v1.1.2/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= 
+github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/multiformats/go-base32 v0.1.0 h1:pVx9xoSPqEIQG8o+UbAe7DNi51oej1NtK+aGkbLYxPE= github.com/multiformats/go-base32 v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYgtWibDcT0rExnbI= github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0= @@ -401,55 +833,140 @@ github.com/multiformats/go-multistream v0.5.0/go.mod h1:n6tMZiwiP2wUsR8DgfDWw1dy github.com/multiformats/go-varint v0.0.1/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8= github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/grpc-proxy v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a h1:dlRvE5fWabOchtH7znfiFCcOvmIYgOeAS5ifBXBlh9Q= +github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a/go.mod h1:hVoHR2EVESiICEMbg137etN/Lx+lSrHPTD39Z/uE+2s= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= +github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.0/go.mod 
h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/ginkgo/v2 v2.13.2 h1:Bi2gGVkfn6gQcjNjZJVO8Gf0FHzMPf2phUei9tejVMs= github.com/onsi/ginkgo/v2 v2.13.2/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg= github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= +github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= +github.com/opencontainers/runc v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/ory/dockertest v3.3.5+incompatible h1:iLLK6SQwIhcbrG783Dghaaa3WPzGc+4Emza6EbVUUGA= +github.com/ory/dockertest v3.3.5+incompatible/go.mod h1:1vX4m9wsvi00u5bseYwXaSnhNrne+V0E6LAcBILJdPs= 
+github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= +github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc h1:8bQZVK1X6BJR/6nYUPxQEP+ReTsceJTKizeuwjWOPUA= +github.com/petermattis/goid v0.0.0-20230904192822-1876fd5063bc/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/piprate/json-gold v0.5.0 h1:RmGh1PYboCFcchVFuh2pbSWAZy4XJaqTMU4KQYsApbM= +github.com/piprate/json-gold v0.5.0/go.mod h1:WZ501QQMbZZ+3pXFPhQKzNwS1+jls0oqov3uQ2WasLs= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/polydawn/refmt v0.89.0 h1:ADJTApkvkeBZsN0tBTx8QjpD9JkmxbKp0cxfr9qszm4= github.com/polydawn/refmt v0.89.0/go.mod h1:/zvteZs/GwLtCgZ4BL6CBsk9IKIlexP43ObX9AxTqTw= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc= +github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= github.com/prometheus/client_golang v0.8.0/go.mod 
h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= -github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos= +github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= -github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.47.0 h1:p5Cz0FNHo7SnWOmWmoRozVcjEp0bIVU8cV7OShpjL1k= +github.com/prometheus/common v0.47.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs 
v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= @@ -462,19 +979,36 @@ github.com/quic-go/webtransport-go v0.6.0 h1:CvNsKqc4W2HljHJnoT+rMmbRJybShZ0YPFD github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1abRaosM2yGOyiikEc= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= +github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= +github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0= +github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= 
+github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY= github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM= @@ -499,11 +1033,20 @@ github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go. github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/users v0.0.0-20180125191416-49c67e49c537/go.mod h1:QJTqeLYEDaXHZDBsXlPCDqdhQuJkuw4NOtaxYe3xii4= github.com/shurcooL/webdavfs v0.0.0-20170829043945-18c3829fa133/go.mod h1:hKmq5kWdCj2z2KEozexVbfEZIWiTjhE0+UjmZgPqehw= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg= github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= @@ -518,19 +1061,38 @@ github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd h1:lmp github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3nbZLryMdc= github.com/sourcenetwork/immutable v0.3.0/go.mod h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= +github.com/sourcenetwork/raccoondb v0.2.0 h1:lQ/r8IUm1IMaivXWhqndgpisLsI59c6M9jn6ujKYBzk= +github.com/sourcenetwork/raccoondb v0.2.0/go.mod h1:A5ElVAhdf9yDjmpLrA3DLqYib09Fnuzm3sFUbY5r9BE= +github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724 h1:Dr13Lb9bTmycQZbNHAP+7RUVcy9g6jxL5rz74ipVyrs= +github.com/sourcenetwork/sourcehub v0.2.1-0.20240305165631-9b75b1000724/go.mod h1:jhWsUtCgIE6vDKg9/uvu1rXAOcVTrALjBXf2kLQGrCk= +github.com/sourcenetwork/zanzi v0.3.0 h1:Y9uyrpsT569QjzAxNOwWDxeWOkcntm+26qDLR7nGuo4= +github.com/sourcenetwork/zanzi v0.3.0/go.mod h1:eLQ94tdz96vfwHIZXL5ZoHbV9YHQeMyFeTc5hFSGDRU= +github.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= @@ -542,6 +1104,7 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= @@ -549,9 +1112,15 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 
h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= +github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d h1:vfofYNRScrDdvS342BElfbETmL1Aiz3i2t0zfRj16Hs= +github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d/go.mod h1:RRCYJbIwD5jmqPI9XoAFR0OcDxqUctll6zUj/+B4S48= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= +github.com/tendermint/go-amino v0.16.0 h1:GyhmgQKvqF82e2oZeuMSp9JTN0N09emoSZlb2lyGa2E= +github.com/tendermint/go-amino v0.16.0/go.mod h1:TQU0M1i/ImAo+tYpZi73AU3V/dKeCoMC9Sphe2ZwGME= +github.com/tendermint/tm-db v0.6.7 h1:fE00Cbl0jayAoqlExN6oyQJ7fR/ZtoVOmvPJ//+shu8= +github.com/tendermint/tm-db v0.6.7/go.mod h1:byQDzFkZV1syXr/ReXS808NxA2xvyuuVgXOJ/088L6I= +github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8 h1:RBkacARv7qY5laaXGlF4wFB/tk5rnthhPb8oIBGoagY= +github.com/teserakt-io/golang-ed25519 v0.0.0-20210104091850-3888c087a4c8/go.mod h1:9PdLyPiZIiW3UopXyRnPYyjUXSpiQNHRLu8fOsR3o8M= github.com/textileio/go-datastore-extensions v1.0.1 h1:qIJGqJaigQ1wD4TdwS/hf73u0HChhXvvUSJuxBEKS+c= github.com/textileio/go-datastore-extensions v1.0.1/go.mod h1:Pzj9FDRkb55910dr/FX8M7WywvnS26gBgEDez1ZBuLE= github.com/textileio/go-ds-badger3 v0.1.0 h1:q0kBuBmAcRUR3ClMSYlyw0224XeuzjjGinU53Qz1uXI= @@ -560,8 +1129,15 @@ github.com/textileio/go-log/v2 v2.1.3-gke-2 h1:YkMA5ua0Cf/X6CkbexInsoJ/HdaHQBlgi github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsciyslwUe5bxV8CEU2w= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ= @@ -582,10 +1158,30 @@ github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdz github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= 
+github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U= +github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= +github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw= +github.com/zondax/ledger-go v0.14.3/go.mod h1:IKKaoxupuB43g4NxeQmbLXv7T9AlQyie1UpHb342ycI= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= +go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= @@ -598,6 +1194,10 @@ go.opentelemetry.io/otel/sdk/metric v1.24.0 h1:yyMQrPzF+k88/DbH7o4FMAs80puqd+9os go.opentelemetry.io/otel/sdk/metric v1.24.0/go.mod h1:I6Y5FjH6rvEnTTAYQz3Mmv2kl6Ek5IIrmwTLqMrrOE0= go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= @@ -606,74 +1206,111 @@ go.uber.org/dig v1.17.1 
h1:Tga8Lz8PcYNsWsyHMZ1Vm0OQOUaJNDyvPImgbAu9YSc= go.uber.org/dig v1.17.1/go.mod h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE= go.uber.org/fx v1.20.1 h1:zVwVQGS8zYvhh9Xxcu4w1M6ESyeMzebzj2NbSayZ4Mk= go.uber.org/fx v1.20.1/go.mod h1:iSYNbHf2y55acNCwCXKx7LbWb5WG1Bnue5RDXz1OREg= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= +go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw= +golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc h1:ao2WRsKSzW6KuUY9IWPwWahcHCgR0s52IfwutMfEbdM= -golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp v0.0.0-20240213143201-ec583247a57a h1:HinSgX1tJRX3KsL//Gxynpw5CTOAIPhgL4W8PNiIpVE= +golang.org/x/exp v0.0.0-20240213143201-ec583247a57a/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= -golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod 
h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -686,42 +1323,86 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190316082340-a2f829d7f35f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220315194320-039c03cc5b86/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030000716-a0a13e073c7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -729,28 +1410,34 @@ golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.16.1 h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA= -golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= +golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= +golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= gonum.org/v1/gonum v0.14.0 h1:2NiG67LD1tEH0D7kM+ps2V+fXmsAnpUeec7n8tcr4S0= gonum.org/v1/gonum v0.14.0/go.mod h1:AoWeoz0becf9QMWtE8iWXNXc27fK4fNeHNf/oMejGfU= google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -760,18 +1447,41 @@ google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20181029155118-b69ba1387ce2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto 
v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg= google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ= +google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80/go.mod h1:cc8bqMqtv9gMOr0zHg2Vzff5ULhhL2IXP4sbcn32Dro= +google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 h1:Lj5rbfG876hIAYFjqiJnPHfhXbv+nzTWfm04Fg/XSVU= +google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM= google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.36.0/go.mod 
h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -782,25 +1492,39 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod 
h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= @@ -810,6 +1534,8 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -818,5 +1544,14 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI= lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +pgregory.net/rapid v1.1.0 h1:CMa0sjHSru3puNx+J0MIAuiiEV4N0qj8/cMWGBBCsjw= +pgregory.net/rapid v1.1.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= +sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck= sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/http/client.go b/http/client.go index 33b9c21fb8..69c5f2a503 100644 --- a/http/client.go +++ b/http/client.go @@ -22,9 +22,10 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" - "github.com/sourcenetwork/immutable" sse "github.com/vito/go-sse/sse" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" @@ -341,7 +342,11 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ return indexes, nil } -func (c *Client) 
ExecRequest(ctx context.Context, query string) *client.RequestResult {
+func (c *Client) ExecRequest(
+	ctx context.Context,
+	identity immutable.Option[string],
+	query string,
+) *client.RequestResult {
 	methodURL := c.http.baseURL.JoinPath("graphql")
 
 	result := &client.RequestResult{}
 
@@ -356,6 +361,7 @@ func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestR
 		return result
 	}
 	c.http.setDefaultHeaders(req)
+	addIdentityToAuthHeaderIfExists(req, identity)
 
 	res, err := c.http.client.Do(req)
 	if err != nil {
diff --git a/http/client_acp.go b/http/client_acp.go
new file mode 100644
index 0000000000..bdd9e6ed3b
--- /dev/null
+++ b/http/client_acp.go
@@ -0,0 +1,63 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.

+package http
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"net/http"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// AddPolicyRequest wraps the policy body to add/register with the acp system.
+type AddPolicyRequest struct {
+	// Policy body in JSON or YAML format.
+	Policy string `json:"policy"`
+}
+
+func (c *Client) AddPolicy(
+	ctx context.Context,
+	creator string,
+	policy string,
+) (client.AddPolicyResult, error) {
+	methodURL := c.http.baseURL.JoinPath("acp", "policy")
+
+	addPolicyRequest := AddPolicyRequest{
+		Policy: policy,
+	}
+
+	addPolicyBody, err := json.Marshal(addPolicyRequest)
+	if err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	req, err := http.NewRequestWithContext(
+		ctx,
+		http.MethodPost,
+		methodURL.String(),
+		bytes.NewBuffer(addPolicyBody),
+	)
+	if err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	// Only add the identity header once the request is known to be valid.
+	addIdentityToAuthHeader(req, creator)
+
+	var policyResult client.AddPolicyResult
+	if err := c.http.requestJson(req, &policyResult); err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	return policyResult, nil
+}
diff --git a/http/client_collection.go b/http/client_collection.go
index 876c175338..c53bc7e7ff 100644
--- a/http/client_collection.go
+++ b/http/client_collection.go
@@ -60,7 +60,11 @@ func (c *Collection) Definition() client.CollectionDefinition {
 	return c.def
 }
 
-func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
+func (c *Collection) Create(
+	ctx context.Context,
+	identity immutable.Option[string],
+	doc *client.Document,
+) error {
 	if !c.Description().Name.HasValue() {
 		return client.ErrOperationNotPermittedOnNamelessCols
 	}
@@ -71,10 +75,14 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
 	if err != nil {
 		return err
 	}
+
 	req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(body))
 	if err != nil {
 		return err
 	}
+
+	addIdentityToAuthHeaderIfExists(req, identity)
+
 	_, err = c.http.request(req)
 	if err != nil {
 		return err
 	}
@@ -83,7 +91,11 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error {
 	return nil
 }
 
-func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error {
+func (c *Collection) CreateMany(
+	ctx context.Context,
+	identity immutable.Option[string],
+	docs []*client.Document,
+) error {
 	if !c.Description().Name.HasValue() {
 		return client.ErrOperationNotPermittedOnNamelessCols
 	}
@@ 
-97,25 +109,35 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er } docMapList = append(docMapList, docMap) } + body, err := json.Marshal(docMapList) if err != nil { return err } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return err } + + addIdentityToAuthHeaderIfExists(req, identity) + _, err = c.http.request(req) if err != nil { return err } + for _, doc := range docs { doc.Clean() } return nil } -func (c *Collection) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -130,6 +152,9 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { if err != nil { return err } + + addIdentityToAuthHeaderIfExists(req, identity) + _, err = c.http.request(req) if err != nil { return err @@ -138,18 +163,26 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.ID(), true) +func (c *Collection) Save( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { + _, err := c.Get(ctx, identity, doc.ID(), true) if err == nil { - return c.Update(ctx, doc) + return c.Update(ctx, identity, doc) } - if errors.Is(err, client.ErrDocumentNotFound) { - return c.Create(ctx, doc) + if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { + return c.Create(ctx, identity, doc) } return err } -func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { +func (c *Collection) Delete( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (bool, error) { if !c.Description().Name.HasValue() { return false, client.ErrOperationNotPermittedOnNamelessCols } @@ -160,6 +193,9 @@ func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, erro if err != nil { return false, err } + + addIdentityToAuthHeaderIfExists(req, identity) + _, err = c.http.request(req) if err != nil { return false, err @@ -167,22 +203,31 @@ func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, erro return true, nil } -func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { - _, err := c.Get(ctx, docID, false) +func (c *Collection) Exists( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (bool, error) { + _, err := c.Get(ctx, identity, docID, false) if err != nil { return false, err } return true, nil } -func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { +func (c *Collection) UpdateWith( + ctx context.Context, + identity immutable.Option[string], + target any, + updater string, +) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) + return c.UpdateWithFilter(ctx, identity, t, updater) case client.DocID: - return c.UpdateWithDocID(ctx, t, updater) + return c.UpdateWithDocID(ctx, identity, t, updater) case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) + return c.UpdateWithDocIDs(ctx, identity, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -190,6 +235,7 @@ func (c 
*Collection) UpdateWith(ctx context.Context, target any, updater string) func (c *Collection) updateWith( ctx context.Context, + identity immutable.Option[string], request CollectionUpdateRequest, ) (*client.UpdateResult, error) { if !c.Description().Name.HasValue() { @@ -206,6 +252,9 @@ func (c *Collection) updateWith( if err != nil { return nil, err } + + addIdentityToAuthHeaderIfExists(req, identity) + var result client.UpdateResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -215,28 +264,39 @@ func (c *Collection) updateWith( func (c *Collection) UpdateWithFilter( ctx context.Context, + identity immutable.Option[string], filter any, updater string, ) (*client.UpdateResult, error) { - return c.updateWith(ctx, CollectionUpdateRequest{ - Filter: filter, - Updater: updater, - }) + return c.updateWith( + ctx, + identity, + CollectionUpdateRequest{ + Filter: filter, + Updater: updater, + }, + ) } func (c *Collection) UpdateWithDocID( ctx context.Context, + identity immutable.Option[string], docID client.DocID, updater string, ) (*client.UpdateResult, error) { - return c.updateWith(ctx, CollectionUpdateRequest{ - DocID: docID.String(), - Updater: updater, - }) + return c.updateWith( + ctx, + identity, + CollectionUpdateRequest{ + DocID: docID.String(), + Updater: updater, + }, + ) } func (c *Collection) UpdateWithDocIDs( ctx context.Context, + identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -244,20 +304,28 @@ func (c *Collection) UpdateWithDocIDs( for _, docID := range docIDs { strDocIDs = append(strDocIDs, docID.String()) } - return c.updateWith(ctx, CollectionUpdateRequest{ - DocIDs: strDocIDs, - Updater: updater, - }) + return c.updateWith( + ctx, + identity, + CollectionUpdateRequest{ + DocIDs: strDocIDs, + Updater: updater, + }, + ) } -func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWith( + ctx context.Context, + identity immutable.Option[string], + target any, +) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) + return c.DeleteWithFilter(ctx, identity, t) case client.DocID: - return c.DeleteWithDocID(ctx, t) + return c.DeleteWithDocID(ctx, identity, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) + return c.DeleteWithDocIDs(ctx, identity, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -265,6 +333,7 @@ func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.Delete func (c *Collection) deleteWith( ctx context.Context, + identity immutable.Option[string], request CollectionDeleteRequest, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { @@ -277,10 +346,14 @@ func (c *Collection) deleteWith( if err != nil { return nil, err } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return nil, err } + + addIdentityToAuthHeaderIfExists(req, identity) + var result client.DeleteResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -288,29 +361,58 @@ func (c *Collection) deleteWith( return &result, nil } -func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { - return c.deleteWith(ctx, CollectionDeleteRequest{ - Filter: filter, - }) +func (c *Collection) DeleteWithFilter( + ctx context.Context, + identity immutable.Option[string], 
+ filter any, +) (*client.DeleteResult, error) { + return c.deleteWith( + ctx, + identity, + CollectionDeleteRequest{ + Filter: filter, + }, + ) } -func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { - return c.deleteWith(ctx, CollectionDeleteRequest{ - DocID: docID.String(), - }) +func (c *Collection) DeleteWithDocID( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (*client.DeleteResult, error) { + return c.deleteWith( + ctx, + identity, + CollectionDeleteRequest{ + DocID: docID.String(), + }, + ) } -func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocIDs( + ctx context.Context, + identity immutable.Option[string], + docIDs []client.DocID, +) (*client.DeleteResult, error) { var strDocIDs []string for _, docID := range docIDs { strDocIDs = append(strDocIDs, docID.String()) } - return c.deleteWith(ctx, CollectionDeleteRequest{ - DocIDs: strDocIDs, - }) + return c.deleteWith( + ctx, + identity, + CollectionDeleteRequest{ + DocIDs: strDocIDs, + }, + ) } -func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, + showDeleted bool, +) (*client.Document, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -327,6 +429,9 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo if err != nil { return nil, err } + + addIdentityToAuthHeaderIfExists(req, identity) + data, err := c.http.request(req) if err != nil { return nil, err @@ -347,7 +452,10 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { +func (c *Collection) GetAllDocIDs( + ctx context.Context, + identity immutable.Option[string], +) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -358,8 +466,11 @@ func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResul if err != nil { return nil, err } + c.http.setDefaultHeaders(req) + addIdentityToAuthHeaderIfExists(req, identity) + res, err := c.http.client.Do(req) if err != nil { return nil, err diff --git a/http/errors.go b/http/errors.go index 1510c2e520..ef25d06421 100644 --- a/http/errors.go +++ b/http/errors.go @@ -19,6 +19,7 @@ import ( const ( errFailedToLoadKeys string = "failed to load given keys" errMethodIsNotImplemented string = "the method is not implemented" + errFailedToGetContext string = "failed to get context" ) // Errors returnable from this package. 
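The hunks above repeat one pattern across the whole HTTP client: build the request, then attach the caller's identity via addIdentityToAuthHeaderIfExists before issuing it. Below is a rough sketch (not part of the patch) of how a caller is expected to drive the reworked identity-aware Collection API; the cosmos address and document JSON are illustrative only, while immutable.Some, acpIdentity.NoIdentity, and the Create/Get signatures come from the hunks in this series:

	// Illustrative caller-side usage of the identity-aware Collection API.
	identity := immutable.Some("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")

	doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema())
	if err != nil {
		return err
	}

	// Writes carry the caller's identity so the acp system can register ownership.
	if err := col.Create(ctx, identity, doc); err != nil {
		return err
	}

	// Reads made without an identity can only see documents that no policy gates.
	_, err = col.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false)
	if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) {
		// Either the document does not exist or the caller may not read it;
		// the API deliberately keeps the two cases indistinguishable.
	}

Save in client_collection.go relies on exactly this probe, checking for ErrDocumentNotFoundOrNotAuthorized after a Get to decide between Create and Update.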
@@ -54,6 +55,13 @@ func (e *errorResponse) UnmarshalJSON(data []byte) error { return nil } +func NewErrFailedToGetContext(contextType string) error { + return errors.New( + errFailedToGetContext, + errors.NewKV("ContextType", contextType), + ) +} + func NewErrFailedToLoadKeys(inner error, publicKeyPath, privateKeyPath string) error { return errors.Wrap( errFailedToLoadKeys, diff --git a/http/handler.go b/http/handler.go index b06ef06cb6..7cd278593b 100644 --- a/http/handler.go +++ b/http/handler.go @@ -31,6 +31,7 @@ var playgroundHandler http.Handler = http.HandlerFunc(http.NotFound) func NewApiRouter() (*Router, error) { tx_handler := &txHandler{} store_handler := &storeHandler{} + acp_handler := &acpHandler{} collection_handler := &collectionHandler{} p2p_handler := &p2pHandler{} lens_handler := &lensHandler{} @@ -43,6 +44,7 @@ func NewApiRouter() (*Router, error) { tx_handler.bindRoutes(router) store_handler.bindRoutes(router) + acp_handler.bindRoutes(router) p2p_handler.bindRoutes(router) ccip_handler.bindRoutes(router) diff --git a/http/handler_acp.go b/http/handler_acp.go new file mode 100644 index 0000000000..b754223c91 --- /dev/null +++ b/http/handler_acp.go @@ -0,0 +1,83 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + + "github.com/getkin/kin-openapi/openapi3" + + "github.com/sourcenetwork/defradb/acp" + "github.com/sourcenetwork/defradb/client" +) + +type acpHandler struct{} + +func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) { + db, ok := req.Context().Value(dbContextKey).(client.DB) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")}) + return + } + + var addPolicyRequest AddPolicyRequest + if err := requestJSON(req, &addPolicyRequest); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + identity := getIdentityFromAuthHeader(req) + if !identity.HasValue() { + responseJSON(rw, http.StatusBadRequest, errorResponse{acp.ErrPolicyCreatorMustNotBeEmpty}) + return + } + + addPolicyResult, err := db.AddPolicy( + req.Context(), + identity.Value(), + addPolicyRequest.Policy, + ) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + responseJSON(rw, http.StatusOK, addPolicyResult) +} + +func (h *acpHandler) bindRoutes(router *Router) { + successResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/success", + } + errorResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/error", + } + acpAddPolicySchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/add_policy_request", + } + + acpAddPolicyRequest := openapi3.NewRequestBody(). + WithRequired(true). 
+ WithJSONSchemaRef(acpAddPolicySchema) + + acpAddPolicy := openapi3.NewOperation() + acpAddPolicy.OperationID = "add policy" + acpAddPolicy.Description = "Add a policy using acp system" + acpAddPolicy.Tags = []string{"acp_policy"} + acpAddPolicy.Responses = openapi3.NewResponses() + acpAddPolicy.Responses.Set("200", successResponse) + acpAddPolicy.Responses.Set("400", errorResponse) + acpAddPolicy.RequestBody = &openapi3.RequestBodyRef{ + Value: acpAddPolicyRequest, + } + + router.AddRoute("/acp/policy", http.MethodPost, acpAddPolicy, h.AddPolicy) +} diff --git a/http/handler_ccip.go b/http/handler_ccip.go index c0eb6a5918..36151c5cc3 100644 --- a/http/handler_ccip.go +++ b/http/handler_ccip.go @@ -60,7 +60,8 @@ func (c *ccipHandler) ExecCCIP(rw http.ResponseWriter, req *http.Request) { return } - result := store.ExecRequest(req.Context(), request.Query) + identity := getIdentityFromAuthHeader(req) + result := store.ExecRequest(req.Context(), identity, request.Query) if result.Pub != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) return diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go index 2a2cc4f077..37e0da951f 100644 --- a/http/handler_ccip_test.go +++ b/http/handler_ccip_test.go @@ -25,6 +25,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" @@ -206,7 +207,7 @@ func setupDatabase(t *testing.T) client.DB { doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) return cdb diff --git a/http/handler_collection.go b/http/handler_collection.go index d713afdf40..1f41442849 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -47,6 +47,8 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { return } + identity := getIdentityFromAuthHeader(req) + switch { case client.IsJSONArray(data): docList, err := client.NewDocsFromJSON(data, col.Schema()) @@ -55,7 +57,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { return } - if err := col.CreateMany(req.Context(), docList); err != nil { + if err := col.CreateMany(req.Context(), identity, docList); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -66,7 +68,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - if err := col.Create(req.Context(), doc); err != nil { + if err := col.Create(req.Context(), identity, doc); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -83,9 +85,11 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request return } + identity := getIdentityFromAuthHeader(req) + switch { case request.Filter != nil: - result, err := col.DeleteWith(req.Context(), request.Filter) + result, err := col.DeleteWith(req.Context(), identity, request.Filter) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -97,7 +101,7 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := 
col.DeleteWith(req.Context(), docID) + result, err := col.DeleteWith(req.Context(), identity, docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -113,7 +117,7 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request } docIDs = append(docIDs, docID) } - result, err := col.DeleteWith(req.Context(), docIDs) + result, err := col.DeleteWith(req.Context(), identity, docIDs) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -133,9 +137,11 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request return } + identity := getIdentityFromAuthHeader(req) + switch { case request.Filter != nil: - result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) + result, err := col.UpdateWith(req.Context(), identity, request.Filter, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -147,7 +153,7 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.UpdateWith(req.Context(), docID, request.Updater) + result, err := col.UpdateWith(req.Context(), identity, docID, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -163,7 +169,7 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request } docIDs = append(docIDs, docID) } - result, err := col.UpdateWith(req.Context(), docIDs, request.Updater) + result, err := col.UpdateWith(req.Context(), identity, docIDs, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -182,11 +188,20 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docID, true) + + identity := getIdentityFromAuthHeader(req) + + doc, err := col.Get(req.Context(), identity, docID, true) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + + if doc == nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{client.ErrDocumentNotFoundOrNotAuthorized}) + return + } + patch, err := io.ReadAll(req.Body) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -196,7 +211,7 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - err = col.Update(req.Context(), doc) + err = col.Update(req.Context(), identity, doc) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -212,7 +227,10 @@ func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - _, err = col.Delete(req.Context(), docID) + + identity := getIdentityFromAuthHeader(req) + + _, err = col.Delete(req.Context(), identity, docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -229,11 +247,20 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docID, showDeleted) + + identity := getIdentityFromAuthHeader(req) + + doc, err := col.Get(req.Context(), identity, docID, showDeleted) if err != nil { responseJSON(rw, http.StatusBadRequest, 
errorResponse{err}) return } + + if doc == nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{client.ErrDocumentNotFoundOrNotAuthorized}) + return + } + docMap, err := doc.ToMap() if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -256,7 +283,9 @@ func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Reque return } - docIDsResult, err := col.GetAllDocIDs(req.Context()) + identity := getIdentityFromAuthHeader(req) + + docIDsResult, err := col.GetAllDocIDs(req.Context(), identity) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/http/handler_store.go b/http/handler_store.go index 6077e6ea60..4c57eda34f 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -311,7 +311,9 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{ErrMissingRequest}) return } - result := store.ExecRequest(req.Context(), request.Query) + + identity := getIdentityFromAuthHeader(req) + result := store.ExecRequest(req.Context(), identity, request.Query) if result.Pub == nil { responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) @@ -621,7 +623,6 @@ func (h *storeHandler) bindRoutes(router *Router) { router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection) router.AddRoute("/collections", http.MethodPatch, patchCollection, h.PatchCollection) router.AddRoute("/view", http.MethodPost, views, h.AddView) - router.AddRoute("/view", http.MethodPost, views, h.AddView) router.AddRoute("/graphql", http.MethodGet, graphQLGet, h.ExecRequest) router.AddRoute("/graphql", http.MethodPost, graphQLPost, h.ExecRequest) router.AddRoute("/debug/dump", http.MethodGet, debugDump, h.PrintDump) diff --git a/http/openapi.go b/http/openapi.go index 12a832c704..9e1f58c854 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -40,6 +40,7 @@ var openApiSchemas = map[string]any{ "ccip_response": &CCIPResponse{}, "patch_schema_request": &patchSchemaRequest{}, "add_view_request": &addViewRequest{}, + "add_policy_request": &AddPolicyRequest{}, "migrate_request": &migrateRequest{}, "set_migration_request": &setMigrationRequest{}, } @@ -134,6 +135,10 @@ func NewOpenAPISpec() (*openapi3.T, error) { Name: "p2p", Description: "Peer-to-peer network operations", }, + &openapi3.Tag{ + Name: "acp", + Description: "Access control policy operations", + }, &openapi3.Tag{ Name: "transaction", Description: "Database transaction operations", diff --git a/http/utils.go b/http/utils.go index c7b1507c4e..97d9a9181c 100644 --- a/http/utils.go +++ b/http/utils.go @@ -15,11 +15,22 @@ import ( "fmt" "io" "net/http" + "strings" + "github.com/sourcenetwork/immutable" + + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore/badger/v4" ) +// Using Basic right now, but this will soon change to 'Bearer' as acp authentication +// gets implemented: https://github.com/sourcenetwork/defradb/issues/2017 +const authSchemaPrefix = "Basic " + +// Name of authorization header +const authHeaderName = "Authorization" + func requestJSON(req *http.Request, out any) error { data, err := io.ReadAll(req.Body) if err != nil { @@ -36,11 +47,46 @@ func responseJSON(rw http.ResponseWriter, status int, out any) { func parseError(msg any) error { switch msg { - case client.ErrDocumentNotFound.Error(): - return client.ErrDocumentNotFound + case 
client.ErrDocumentNotFoundOrNotAuthorized.Error():
+		return client.ErrDocumentNotFoundOrNotAuthorized
 	case badger.ErrTxnConflict.Error():
 		return badger.ErrTxnConflict
 	default:
 		return fmt.Errorf("%s", msg)
 	}
 }
+
+// addIdentityToAuthHeader adds the identity to the auth header; the identity must always exist here.
+func addIdentityToAuthHeader(req *http.Request, identity string) {
+	// Create a bearer that will get added to authorization header.
+	bearerWithIdentity := authSchemaPrefix + identity
+
+	// Add the authorization header with the bearer containing identity.
+	req.Header.Add(authHeaderName, bearerWithIdentity)
+}
+
+// addIdentityToAuthHeaderIfExists adds the identity to the auth header if it exists, otherwise does nothing.
+func addIdentityToAuthHeaderIfExists(req *http.Request, identity immutable.Option[string]) {
+	// Do nothing if there is no identity to add.
+	if !identity.HasValue() {
+		return
+	}
+	addIdentityToAuthHeader(req, identity.Value())
+}
+
+// getIdentityFromAuthHeader tries to get the identity from the auth header; if it is found
+// with the expected auth schema then it is returned, otherwise no identity is returned.
+func getIdentityFromAuthHeader(req *http.Request) immutable.Option[string] {
+	authHeader := req.Header.Get(authHeaderName)
+	if authHeader == "" {
+		return acpIdentity.NoIdentity
+	}
+
+	identity := strings.TrimPrefix(authHeader, authSchemaPrefix)
+	// If the expected schema prefix was not found, or the identity is empty, then assume no identity.
+	if identity == authHeader || identity == "" {
+		return acpIdentity.NoIdentity
+	}
+
+	return acpIdentity.NewIdentity(identity)
+}
diff --git a/lens/fetcher.go b/lens/fetcher.go
index 90c80c78fb..f4895a0fd7 100644
--- a/lens/fetcher.go
+++ b/lens/fetcher.go
@@ -16,6 +16,9 @@ import (
 
 	"github.com/fxamacker/cbor/v2"
 
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/core"
@@ -35,6 +38,7 @@ type lensedFetcher struct {
 	txn datastore.Txn
 
 	col client.Collection
+
 	// Cache the fieldDescriptions mapped by name to allow for cheaper access within the fetcher loop
 	fieldDescriptionsByName map[string]client.FieldDefinition
 
@@ -57,7 +61,9 @@ func NewFetcher(source fetcher.Fetcher, registry client.LensRegistry) fetcher.Fe
 
 func (f *lensedFetcher) Init(
 	ctx context.Context,
+	identity immutable.Option[string],
 	txn datastore.Txn,
+	acp immutable.Option[acp.ACP],
 	col client.Collection,
 	fields []client.FieldDefinition,
 	filter *mapper.Filter,
@@ -105,7 +111,18 @@ historyLoop:
 	} else {
 		innerFetcherFields = fields
 	}
-	return f.source.Init(ctx, txn, col, innerFetcherFields, filter, docmapper, reverse, showDeleted)
+	return f.source.Init(
+		ctx,
+		identity,
+		txn,
+		acp,
+		col,
+		innerFetcherFields,
+		filter,
+		docmapper,
+		reverse,
+		showDeleted,
+	)
 }
 
 func (f *lensedFetcher) Start(ctx context.Context, spans core.Spans) error {
diff --git a/net/client_test.go b/net/client_test.go
index 89c26e06b5..3be892e3f2 100644
--- a/net/client_test.go
+++ b/net/client_test.go
@@ -18,6 +18,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"google.golang.org/grpc"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/events"
 )
@@ -113,12 +114,12 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) {
 	doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Schema())
 	require.NoError(t, err)
 
-	err = col.Save(ctx, doc)
+	err = 
col.Save(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) col, err = n2.db.GetCollectionByName(ctx, "User") require.NoError(t, err) - err = col.Save(ctx, doc) + err = col.Save(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) cid, err := createCID(doc) diff --git a/net/dag_test.go b/net/dag_test.go index ddd9e9aab3..5229e68e93 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -22,6 +22,7 @@ import ( mh "github.com/multiformats/go-multihash" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/merkle/clock" @@ -172,7 +173,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) txn1, _ := db1.NewTxn(ctx, false) diff --git a/net/errors.go b/net/errors.go index 1ca2d857d5..773eb8765d 100644 --- a/net/errors.go +++ b/net/errors.go @@ -29,12 +29,15 @@ const ( ) var ( - ErrPeerConnectionWaitTimout = errors.New("waiting for peer connection timed out") - ErrPubSubWaitTimeout = errors.New("waiting for pubsub timed out") - ErrPushLogWaitTimeout = errors.New("waiting for pushlog timed out") - ErrNilDB = errors.New("database object can't be nil") - ErrNilUpdateChannel = errors.New("tried to subscribe to update channel, but update channel is nil") - ErrSelfTargetForReplicator = errors.New("can't target ourselves as a replicator") + ErrP2PColHasPolicy = errors.New("p2p collection specified has a policy on it") + ErrReplicatorColHasPolicy = errors.New("replicator collection specified has a policy on it") + ErrReplicatorSomeColsHavePolicy = errors.New("replicator can not use all collections, as some have policy") + ErrPeerConnectionWaitTimout = errors.New("waiting for peer connection timed out") + ErrPubSubWaitTimeout = errors.New("waiting for pubsub timed out") + ErrPushLogWaitTimeout = errors.New("waiting for pushlog timed out") + ErrNilDB = errors.New("database object can't be nil") + ErrNilUpdateChannel = errors.New("tried to subscribe to update channel, but update channel is nil") + ErrSelfTargetForReplicator = errors.New("can't target ourselves as a replicator") ) func NewErrPushLog(inner error, kv ...errors.KV) error { diff --git a/net/node_test.go b/net/node_test.go index 3b7f28d017..bf0bc653c5 100644 --- a/net/node_test.go +++ b/net/node_test.go @@ -31,13 +31,11 @@ import ( func FixtureNewMemoryDBWithBroadcaster(t *testing.T) client.DB { var database client.DB - var options []db.Option ctx := context.Background() - options = append(options, db.WithUpdateEvents()) opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - database, err = db.NewDB(ctx, rootstore, options...) 
+ database, err = db.NewDB(ctx, rootstore, db.WithUpdateEvents()) require.NoError(t, err) return database } diff --git a/net/peer_collection.go b/net/peer_collection.go index 6f4f4d8ba8..4ef1139a1c 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -16,6 +16,7 @@ import ( dsq "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/immutable" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" ) @@ -47,6 +48,14 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er storeCollections = append(storeCollections, storeCol...) } + // Ensure none of the collections have a policy on them, until following is implemented: + // TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 + for _, col := range storeCollections { + if col.Description().Policy.HasValue() { + return ErrP2PColHasPolicy + } + } + // Ensure we can add all the collections to the store on the transaction // before adding to topics. for _, col := range storeCollections { @@ -71,7 +80,8 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // from the pubsub topics to avoid receiving duplicate events. removedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocIDs(p.ctx) + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + keyChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) if err != nil { return err } @@ -141,7 +151,8 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // to the pubsub topics. addedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocIDs(p.ctx) + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + keyChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) if err != nil { return err } diff --git a/net/peer_replicator.go b/net/peer_replicator.go index 3638122a2a..93f6070f0b 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -18,6 +18,7 @@ import ( "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" ) @@ -48,15 +49,30 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { if err != nil { return NewErrReplicatorCollections(err) } + + if col.Description().Policy.HasValue() { + return ErrReplicatorColHasPolicy + } + collections = append(collections, col) } default: - // default to all collections - collections, err = p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) + // default to all collections (unless a collection contains a policy). + // TODO-ACP: default to all collections after resolving https://github.com/sourcenetwork/defradb/issues/2366 + allCollections, err := p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrReplicatorCollections(err) } + + for _, col := range allCollections { + // Can not default to all collections if any collection has a policy. 
+ // TODO-ACP: remove this check/loop after https://github.com/sourcenetwork/defradb/issues/2366 + if col.Description().Policy.HasValue() { + return ErrReplicatorSomeColsHavePolicy + } + } + collections = allCollections } rep.Schemas = nil @@ -92,7 +108,8 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { - keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx) + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx, acpIdentity.NoIdentity) if err != nil { return NewErrReplicatorDocID(err, col.Name().Value(), rep.Info.ID) } diff --git a/net/peer_test.go b/net/peer_test.go index 0a863b8112..3350e026ba 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -12,6 +12,7 @@ package net import ( "context" + "fmt" "testing" "time" @@ -25,6 +26,7 @@ import ( rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core/crdt" "github.com/sourcenetwork/defradb/datastore/memory" @@ -115,7 +117,7 @@ const randomMultiaddr = "/ip4/127.0.0.1/tcp/0" func newTestNode(ctx context.Context, t *testing.T) (client.DB, *Node) { store := memory.NewDatastore(ctx) - db, err := db.NewDB(ctx, store, db.WithUpdateEvents()) + db, err := db.NewDB(ctx, store, db.WithUpdateEvents(), db.WithACPInMemory()) require.NoError(t, err) n, err := NewNode( @@ -169,7 +171,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) h, err := libp2p.New() @@ -389,6 +391,105 @@ func TestSetReplicator_NoError(t *testing.T) { require.NoError(t, err) } +// This test documents that we don't allow setting replicator with a collection that has a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testing.T) { + ctx := context.Background() + db, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = db.AddSchema(ctx, schema) + require.NoError(t, err) + + info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") + require.NoError(t, err) + + err = n.Peer.SetReplicator(ctx, client.Replicator{ + Info: *info, + Schemas: []string{"User"}, + }) + require.Error(t, err) + require.ErrorIs(t, err, ErrReplicatorColHasPolicy) +} + +// This test documents that we don't allow setting replicator using default option when any collection has a policy +// until the following is implemented: +// TODO-ACP: ACP 
<> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestSetReplicatorWithSomeCollectionThatHasPolicyUsingAllCollectionsByDefault_ReturnError(t *testing.T) { + ctx := context.Background() + db, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = db.AddSchema(ctx, schema) + require.NoError(t, err) + + info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") + require.NoError(t, err) + + err = n.Peer.SetReplicator(ctx, client.Replicator{ + Info: *info, + // Note: The missing explicit input of schemas here + }) + require.ErrorIs(t, err, ErrReplicatorSomeColsHavePolicy) +} + func TestSetReplicator_WithInvalidAddress_EmptyPeerIDError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -475,10 +576,10 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) - keysCh, err := col.GetAllDocIDs(ctx) + keysCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -698,6 +799,53 @@ func TestAddP2PCollections_WithInvalidCollectionID_NotFoundError(t *testing.T) { require.Error(t, err, ds.ErrNotFound) } +// This test documents that we don't allow adding p2p collections that have a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestAddP2PCollectionsWithPermissionedCollection_Error(t *testing.T) { + ctx := context.Background() + db, n := newTestNode(ctx, t) + defer n.Close() + + policy := ` + description: a policy + actor: + name: actor + resources: + user: + permissions: + read: + expr: owner + write: + expr: owner + relations: + owner: + types: + - actor + ` + policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + policyID := policyResult.PolicyID + require.NoError(t, err) + require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) + + schema := fmt.Sprintf(` + type User @policy(id: "%s", resource: "user") { + name: String + age: Int + } + `, policyID, + ) + _, err = db.AddSchema(ctx, schema) + require.NoError(t, err) + + col, err := db.GetCollectionByName(ctx, "User") + require.NoError(t, err) + + err = n.Peer.AddP2PCollections(ctx, []string{col.SchemaRoot()}) + require.Error(t, err) + require.ErrorIs(t, err, ErrP2PColHasPolicy) +} + func TestAddP2PCollections_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -792,7 +940,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, 
acpIdentity.NoIdentity, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -845,7 +993,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) @@ -875,7 +1023,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -928,7 +1076,7 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -972,7 +1120,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) docCid, err := createCID(doc) diff --git a/net/server.go b/net/server.go index 41cfd3625b..58a9f16f75 100644 --- a/net/server.go +++ b/net/server.go @@ -29,6 +29,7 @@ import ( grpcpeer "google.golang.org/grpc/peer" "google.golang.org/protobuf/proto" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore/badger/v4" @@ -107,7 +108,8 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) if _, ok := colMap[col.SchemaRoot()]; ok { continue } - docIDChan, err := col.GetAllDocIDs(p.ctx) + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + docIDChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) if err != nil { return nil, err } @@ -353,14 +355,18 @@ func (s *server) syncIndexedDocs( return err } - oldDoc, err := preTxnCol.Get(ctx, docID, false) - isNewDoc := errors.Is(err, client.ErrDocumentNotFound) + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 + // Resolve while handling acp <> secondary indexes. + oldDoc, err := preTxnCol.Get(ctx, acpIdentity.NoIdentity, docID, false) + isNewDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isNewDoc && err != nil { return err } - doc, err := col.Get(ctx, docID, false) - isDeletedDoc := errors.Is(err, client.ErrDocumentNotFound) + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 + // Resolve while handling acp <> secondary indexes. 
+	doc, err := col.Get(ctx, acpIdentity.NoIdentity, docID, false)
+	isDeletedDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized)
 	if !isDeletedDoc && err != nil {
 		return err
 	}
diff --git a/net/server_test.go b/net/server_test.go
index 099f426887..5e6eda3d1d 100644
--- a/net/server_test.go
+++ b/net/server_test.go
@@ -18,9 +18,11 @@ import (
 	"github.com/libp2p/go-libp2p/core/event"
 	"github.com/libp2p/go-libp2p/core/host"
 	rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc"
+	"github.com/sourcenetwork/immutable"
 	"github.com/stretchr/testify/require"
 	grpcpeer "google.golang.org/grpc/peer"
 
+	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/datastore/memory"
 	"github.com/sourcenetwork/defradb/errors"
@@ -98,7 +100,10 @@ type mockCollection struct {
 }
 
 func (mCol *mockCollection) SchemaRoot() string { return "mockColID" }
-func (mCol *mockCollection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) {
+func (mCol *mockCollection) GetAllDocIDs(
+	ctx context.Context,
+	identity immutable.Option[string],
+) (<-chan client.DocIDResult, error) {
 	return nil, mockError
 }
@@ -134,7 +140,7 @@ func TestNewServerWithAddTopicError(t *testing.T) {
 	doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema())
 	require.NoError(t, err)
 
-	err = col.Create(ctx, doc)
+	err = col.Create(ctx, acpIdentity.NoIdentity, doc)
 	require.NoError(t, err)
 
 	_, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true)
@@ -180,7 +186,7 @@ func TestNewServerWithEmitterError(t *testing.T) {
 	doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema())
 	require.NoError(t, err)
 
-	err = col.Create(ctx, doc)
+	err = col.Create(ctx, acpIdentity.NoIdentity, doc)
 	require.NoError(t, err)
 
 	n.Peer.host = &mockHost{n.Peer.host}
diff --git a/planner/create.go b/planner/create.go
index a03c429da9..3333ae999e 100644
--- a/planner/create.go
+++ b/planner/create.go
@@ -78,7 +78,11 @@ func (n *createNode) Next() (bool, error) {
 		return false, nil
 	}
 
-	if err := n.collection.WithTxn(n.p.txn).Create(n.p.ctx, n.doc); err != nil {
+	if err := n.collection.WithTxn(n.p.txn).Create(
+		n.p.ctx,
+		n.p.identity,
+		n.doc,
+	); err != nil {
 		return false, err
 	}
 
diff --git a/planner/delete.go b/planner/delete.go
index 63cdec9a6f..74bb14d202 100644
--- a/planner/delete.go
+++ b/planner/delete.go
@@ -53,7 +53,11 @@ func (n *deleteNode) Next() (bool, error) {
 		if err != nil {
 			return false, err
 		}
-		_, err = n.collection.DeleteWithDocID(n.p.ctx, docID)
+		_, err = n.collection.DeleteWithDocID(
+			n.p.ctx,
+			n.p.identity,
+			docID,
+		)
 		if err != nil {
 			return false, err
 		}
diff --git a/planner/planner.go b/planner/planner.go
index b2d9bc47a9..eca0168671 100644
--- a/planner/planner.go
+++ b/planner/planner.go
@@ -13,6 +13,9 @@ package planner
 import (
 	"context"
 
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
 	"github.com/sourcenetwork/defradb/connor"
@@ -82,17 +85,27 @@ type PlanContext struct {
 
 // Planner combines session state and database state to
 // produce a request plan, which is run by the execution context.
type Planner struct {
-	txn datastore.Txn
-	db  client.Store
+	txn      datastore.Txn
+	identity immutable.Option[string]
+	acp      immutable.Option[acp.ACP]
+	db       client.Store
 	ctx context.Context
 }
 
-func New(ctx context.Context, db client.Store, txn datastore.Txn) *Planner {
+func New(
+	ctx context.Context,
+	identity immutable.Option[string],
+	acp immutable.Option[acp.ACP],
+	db client.Store,
+	txn datastore.Txn,
+) *Planner {
 	return &Planner{
-		txn: txn,
-		db:  db,
-		ctx: ctx,
+		txn:      txn,
+		identity: identity,
+		acp:      acp,
+		db:       db,
+		ctx:      ctx,
 	}
 }
diff --git a/planner/scan.go b/planner/scan.go
index 3ba0dd03cb..e52b3c2131 100644
--- a/planner/scan.go
+++ b/planner/scan.go
@@ -64,7 +64,9 @@ func (n *scanNode) Init() error {
 	// init the fetcher
 	if err := n.fetcher.Init(
 		n.p.ctx,
+		n.p.identity,
 		n.p.txn,
+		n.p.acp,
 		n.col,
 		n.fields,
 		n.filter,
diff --git a/planner/update.go b/planner/update.go
index 077ceb39e4..b86c616dbb 100644
--- a/planner/update.go
+++ b/planner/update.go
@@ -72,7 +72,7 @@ func (n *updateNode) Next() (bool, error) {
 		if err != nil {
 			return false, err
 		}
-		_, err = n.collection.UpdateWithDocID(n.p.ctx, docID, string(patch))
+		_, err = n.collection.UpdateWithDocID(n.p.ctx, n.p.identity, docID, string(patch))
 		if err != nil {
 			return false, err
 		}
diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go
index 3786d41209..00a4f6503c 100644
--- a/request/graphql/schema/collection.go
+++ b/request/graphql/schema/collection.go
@@ -108,6 +108,8 @@ func collectionFromAstDefinition(
 		},
 	}
 
+	policyDescription := immutable.None[client.PolicyDescription]()
+
 	indexDescriptions := []client.IndexDescription{}
 	for _, field := range def.Fields {
 		tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value)
@@ -147,12 +149,20 @@ func collectionFromAstDefinition(
 			}
 			indexDescriptions = append(indexDescriptions, index)
 		}
+		if directive.Name.Value == types.PolicySchemaDirectiveLabel {
+			policy, err := policyFromAST(directive)
+			if err != nil {
+				return client.CollectionDefinition{}, err
+			}
+			policyDescription = immutable.Some(policy)
+		}
 	}
 
 	return client.CollectionDefinition{
 		Description: client.CollectionDescription{
 			Name:    immutable.Some(def.Name.Value),
 			Indexes: indexDescriptions,
+			Policy:  policyDescription,
 		},
 		Schema: client.SchemaDescription{
 			Name: def.Name.Value,
@@ -383,6 +393,31 @@ func fieldsFromAST(field *ast.FieldDefinition,
 	return fieldDescriptions, nil
 }
 
+// policyFromAST parses the policy description from the @policy directive. Validation of
+// the returned values is deferred here, since that validation requires the acp system.
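+//
+// For example (illustrative, with a shortened policy ID), a schema definition such as:
+//
+//	type User @policy(id: "fc3a0a39...", resource: "user") {
+//		name: String
+//	}
+//
+// parses to a PolicyDescription with ID "fc3a0a39..." and ResourceName "user".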
+func policyFromAST(directive *ast.Directive) (client.PolicyDescription, error) { + policyDesc := client.PolicyDescription{} + for _, arg := range directive.Arguments { + switch arg.Name.Value { + case types.PolicySchemaDirectivePropID: + policyIDProp, ok := arg.Value.(*ast.StringValue) + if !ok { + return client.PolicyDescription{}, ErrPolicyInvalidIDProp + } + policyDesc.ID = policyIDProp.Value + case types.PolicySchemaDirectivePropResource: + policyResourceProp, ok := arg.Value.(*ast.StringValue) + if !ok { + return client.PolicyDescription{}, ErrPolicyInvalidResourceProp + } + policyDesc.ResourceName = policyResourceProp.Value + default: + return client.PolicyDescription{}, ErrPolicyWithUnknownArg + } + } + return policyDesc, nil +} + func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CType, error) { if directive, exists := findDirective(field, "crdt"); exists { for _, arg := range directive.Arguments { diff --git a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index e832e687ee..01f6e9bc9b 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -27,6 +27,9 @@ const ( errIndexUnknownArgument string = "index with unknown argument" errIndexInvalidArgument string = "index with invalid argument" errIndexInvalidName string = "index with invalid name" + errPolicyUnknownArgument string = "policy with unknown argument" + errPolicyInvalidIDProp string = "policy directive with invalid id property" + errPolicyInvalidResourceProp string = "policy directive with invalid resource property" errViewRelationMustBeOneSided string = "relations in views must only be defined on one schema" ) @@ -51,6 +54,9 @@ var ( ErrIndexMissingFields = errors.New(errIndexMissingFields) ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) + ErrPolicyWithUnknownArg = errors.New(errPolicyUnknownArgument) + ErrPolicyInvalidIDProp = errors.New(errPolicyInvalidIDProp) + ErrPolicyInvalidResourceProp = errors.New(errPolicyInvalidResourceProp) ErrViewRelationMustBeOneSided = errors.New(errViewRelationMustBeOneSided) ) diff --git a/request/graphql/schema/manager.go b/request/graphql/schema/manager.go index 89860d2c53..f4a2cb3e5b 100644 --- a/request/graphql/schema/manager.go +++ b/request/graphql/schema/manager.go @@ -113,6 +113,7 @@ func defaultDirectivesType() []*gql.Directive { return []*gql.Directive{ schemaTypes.CRDTFieldDirective, schemaTypes.ExplainDirective, + schemaTypes.PolicyDirective, schemaTypes.IndexDirective, schemaTypes.IndexFieldDirective, schemaTypes.PrimaryDirective, diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index 37cb840d05..2273e3adb9 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -29,6 +29,10 @@ const ( CRDTDirectiveLabel = "crdt" CRDTDirectivePropType = "type" + PolicySchemaDirectiveLabel = "policy" + PolicySchemaDirectivePropID = "id" + PolicySchemaDirectivePropResource = "resource" + IndexDirectiveLabel = "index" IndexDirectivePropName = "name" IndexDirectivePropUnique = "unique" @@ -94,6 +98,22 @@ var ( }, }) + PolicyDirective *gql.Directive = gql.NewDirective(gql.DirectiveConfig{ + Name: PolicySchemaDirectiveLabel, + Description: "@policy is a directive that can be used to link a policy on a collection type.", + Args: gql.FieldConfigArgument{ + PolicySchemaDirectivePropID: &gql.ArgumentConfig{ + Type: gql.String, + }, + PolicySchemaDirectivePropResource: 
&gql.ArgumentConfig{ + Type: gql.String, + }, + }, + Locations: []string{ + gql.DirectiveLocationObject, + }, + }) + IndexDirective *gql.Directive = gql.NewDirective(gql.DirectiveConfig{ Name: IndexDirectiveLabel, Description: "@index is a directive that can be used to create an index on a type.", diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index dac81d0ce2..90b9cd0768 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -22,6 +22,7 @@ import ( "github.com/sourcenetwork/badger/v4" "github.com/sourcenetwork/corelog" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/tests/bench/fixtures" @@ -170,7 +171,7 @@ func BackfillBenchmarkDB( // in place. The error check could prob use a wrap system // but its fine :). for { - if err := cols[j].Create(ctx, doc); err != nil && + if err := cols[j].Create(ctx, acpIdentity.NoIdentity, doc); err != nil && err.Error() == badger.ErrConflict.Error() { log.InfoContext( ctx, diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index a1bed37d3a..b8faa0dd50 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -17,6 +17,7 @@ import ( "sync" "testing" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" benchutils "github.com/sourcenetwork/defradb/tests/bench" @@ -72,7 +73,12 @@ func runCollectionBenchGetSync(b *testing.B, for i := 0; i < b.N; i++ { // outer benchmark loop for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types - collections[k].Get(ctx, listOfDocIDs[j][k], false) //nolint:errcheck + collections[k].Get( //nolint:errcheck + ctx, + acpIdentity.NoIdentity, + listOfDocIDs[j][k], + false, + ) } } } @@ -98,7 +104,12 @@ func runCollectionBenchGetAsync(b *testing.B, for k := 0; k < numTypes; k++ { // apply op to all the related types wg.Add(1) go func(ctx context.Context, col client.Collection, docID client.DocID) { - col.Get(ctx, docID, false) //nolint:errcheck + col.Get( //nolint:errcheck + ctx, + acpIdentity.NoIdentity, + docID, + false, + ) wg.Done() }(ctx, collections[k], listOfDocIDs[j][k]) } @@ -173,7 +184,7 @@ func runCollectionBenchCreateMany( docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Schema()) } - collections[0].CreateMany(ctx, docs) //nolint:errcheck + collections[0].CreateMany(ctx, acpIdentity.NoIdentity, docs) //nolint:errcheck } b.StopTimer() @@ -194,7 +205,7 @@ func runCollectionBenchCreateSync(b *testing.B, docs, _ := fixture.GenerateDocs() for k := 0; k < numTypes; k++ { doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Schema()) - collections[k].Create(ctx, doc) //nolint:errcheck + collections[k].Create(ctx, acpIdentity.NoIdentity, doc) //nolint:errcheck } } } @@ -233,7 +244,7 @@ func runCollectionBenchCreateAsync(b *testing.B, // create the documents for j := 0; j < numTypes; j++ { doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Schema()) - collections[j].Create(ctx, doc) //nolint:errcheck + collections[j].Create(ctx, acpIdentity.NoIdentity, doc) //nolint:errcheck } wg.Done() diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index fdd504175a..5bb4472840 100644 --- a/tests/bench/query/planner/utils.go +++ 
b/tests/bench/query/planner/utils.go @@ -15,6 +15,8 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" @@ -78,7 +80,13 @@ func runMakePlanBench( b.ResetTimer() for i := 0; i < b.N; i++ { - planner := planner.New(ctx, db.WithTxn(txn), txn) + planner := planner.New( + ctx, + acpIdentity.NoIdentity, + acp.NoACP, + db.WithTxn(txn), + txn, + ) plan, err := planner.MakePlan(q) if err != nil { return errors.Wrap("failed to make plan", err) diff --git a/tests/bench/query/simple/utils.go b/tests/bench/query/simple/utils.go index 14752e7ae2..e4604f96a0 100644 --- a/tests/bench/query/simple/utils.go +++ b/tests/bench/query/simple/utils.go @@ -17,6 +17,7 @@ import ( "strings" "testing" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" benchutils "github.com/sourcenetwork/defradb/tests/bench" @@ -70,7 +71,7 @@ func runQueryBenchGetSync( b.ResetTimer() for i := 0; i < b.N; i++ { - res := db.ExecRequest(ctx, query) + res := db.ExecRequest(ctx, acpIdentity.NoIdentity, query) if len(res.GQL.Errors) > 0 { return errors.New(fmt.Sprintf("Query error: %v", res.GQL.Errors)) } diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 4c52a86abc..d10188d4b2 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -23,6 +23,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/cli" @@ -171,6 +172,28 @@ func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) return err } +func (w *Wrapper) AddPolicy( + ctx context.Context, + creator string, + policy string, +) (client.AddPolicyResult, error) { + args := []string{"client", "acp", "policy", "add"} + args = append(args, "--identity", creator) + args = append(args, policy) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return client.AddPolicyResult{}, err + } + + var addPolicyResult client.AddPolicyResult + if err := json.Unmarshal(data, &addPolicyResult); err != nil { + return client.AddPolicyResult{}, err + } + + return addPolicyResult, err +} + func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { args := []string{"client", "schema", "add"} args = append(args, schema) @@ -369,10 +392,18 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] return indexes, nil } -func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { +func (w *Wrapper) ExecRequest( + ctx context.Context, + identity immutable.Option[string], + query string, +) *client.RequestResult { args := []string{"client", "query"} args = append(args, query) + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + result := &client.RequestResult{} stdOut, stdErr, err := w.cmd.executeStream(args) diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index be7c3302ac..9bb8fb9938 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -56,7 +56,11 @@ func (c *Collection) Definition() client.CollectionDefinition { return c.def } -func (c 
*Collection) Create(ctx context.Context, doc *client.Document) error { +func (c *Collection) Create( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -64,6 +68,10 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name.Value()) + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + document, err := doc.String() if err != nil { return err @@ -78,7 +86,11 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *Collection) CreateMany( + ctx context.Context, + identity immutable.Option[string], + docs []*client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } @@ -86,6 +98,10 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name.Value()) + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { docMap, err := doc.ToMap() @@ -110,13 +126,22 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er return nil } -func (c *Collection) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { if !c.Description().Name.HasValue() { return client.ErrOperationNotPermittedOnNamelessCols } args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, "--docID", doc.ID().String()) document, err := doc.ToJSONPatch() @@ -133,41 +158,58 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { return nil } -func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.ID(), true) +func (c *Collection) Save( + ctx context.Context, + identity immutable.Option[string], + doc *client.Document, +) error { + _, err := c.Get(ctx, identity, doc.ID(), true) if err == nil { - return c.Update(ctx, doc) + return c.Update(ctx, identity, doc) } - if errors.Is(err, client.ErrDocumentNotFound) { - return c.Create(ctx, doc) + if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { + return c.Create(ctx, identity, doc) } return err } -func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { - res, err := c.DeleteWithDocID(ctx, docID) +func (c *Collection) Delete( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (bool, error) { + res, err := c.DeleteWithDocID(ctx, identity, docID) if err != nil { return false, err } return res.Count == 1, nil } -func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { - _, err := c.Get(ctx, docID, false) +func (c *Collection) Exists( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (bool, error) { + _, err := c.Get(ctx, identity, docID, false) if err != nil { return 
false, err } return true, nil } -func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { +func (c *Collection) UpdateWith( + ctx context.Context, + identity immutable.Option[string], + target any, + updater string, +) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) + return c.UpdateWithFilter(ctx, identity, t, updater) case client.DocID: - return c.UpdateWithDocID(ctx, t, updater) + return c.UpdateWithDocID(ctx, identity, t, updater) case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) + return c.UpdateWithDocIDs(ctx, identity, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -190,6 +232,7 @@ func (c *Collection) updateWith( func (c *Collection) UpdateWithFilter( ctx context.Context, + identity immutable.Option[string], filter any, updater string, ) (*client.UpdateResult, error) { @@ -199,6 +242,11 @@ func (c *Collection) UpdateWithFilter( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, "--updater", updater) filterJSON, err := json.Marshal(filter) @@ -212,6 +260,7 @@ func (c *Collection) UpdateWithFilter( func (c *Collection) UpdateWithDocID( ctx context.Context, + identity immutable.Option[string], docID client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -221,6 +270,11 @@ func (c *Collection) UpdateWithDocID( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, "--docID", docID.String()) args = append(args, "--updater", updater) @@ -229,6 +283,7 @@ func (c *Collection) UpdateWithDocID( func (c *Collection) UpdateWithDocIDs( ctx context.Context, + identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -238,6 +293,11 @@ func (c *Collection) UpdateWithDocIDs( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, "--updater", updater) strDocIDs := make([]string, len(docIDs)) @@ -249,14 +309,18 @@ func (c *Collection) UpdateWithDocIDs( return c.updateWith(ctx, args) } -func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWith( + ctx context.Context, + identity immutable.Option[string], + target any, +) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) + return c.DeleteWithFilter(ctx, identity, t) case client.DocID: - return c.DeleteWithDocID(ctx, t) + return c.DeleteWithDocID(ctx, identity, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) + return c.DeleteWithDocIDs(ctx, identity, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -277,7 +341,11 @@ func (c *Collection) deleteWith( return &res, nil } -func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithFilter( + ctx context.Context, + identity immutable.Option[string], + filter any, +) (*client.DeleteResult, error) { if 
!c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -285,6 +353,10 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + filterJSON, err := json.Marshal(filter) if err != nil { return nil, err @@ -294,19 +366,32 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocID( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, +) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, "--docID", docID.String()) return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocIDs( + ctx context.Context, + identity immutable.Option[string], + docIDs []client.DocID, +) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } @@ -314,6 +399,10 @@ func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + strDocIDs := make([]string, len(docIDs)) for i, v := range docIDs { strDocIDs[i] = v.String() @@ -323,13 +412,23 @@ func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID return c.deleteWith(ctx, args) } -func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get( + ctx context.Context, + identity immutable.Option[string], + docID client.DocID, + showDeleted bool, +) (*client.Document, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } args := []string{"client", "collection", "get"} args = append(args, "--name", c.Description().Name.Value()) + + if identity.HasValue() { + args = append(args, "--identity", identity.Value()) + } + args = append(args, docID.String()) if showDeleted { @@ -356,7 +455,10 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { +func (c *Collection) GetAllDocIDs( + ctx context.Context, + identity immutable.Option[string], +) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols } diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 4de71c4f1f..415212b99c 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -17,6 +17,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/lens-vm/lens/host-go/config/model" "github.com/libp2p/go-libp2p/core/peer" + 
"github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" @@ -97,6 +98,14 @@ func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.Collec return w.client.AddSchema(ctx, schema) } +func (w *Wrapper) AddPolicy( + ctx context.Context, + creator string, + policy string, +) (client.AddPolicyResult, error) { + return w.client.AddPolicy(ctx, creator, policy) +} + func (w *Wrapper) PatchSchema( ctx context.Context, patch string, @@ -160,8 +169,12 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] return w.client.GetAllIndexes(ctx) } -func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { - return w.client.ExecRequest(ctx, query) +func (w *Wrapper) ExecRequest( + ctx context.Context, + identity immutable.Option[string], + query string, +) *client.RequestResult { + return w.client.ExecRequest(ctx, identity, query) } func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { diff --git a/tests/gen/cli/gendocs.go b/tests/gen/cli/gendocs.go index 9123bf0c2b..068af9b25a 100644 --- a/tests/gen/cli/gendocs.go +++ b/tests/gen/cli/gendocs.go @@ -19,6 +19,7 @@ import ( "github.com/spf13/cobra" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/tests/gen" @@ -121,7 +122,7 @@ func saveBatchToCollections( for colName, colDocs := range colDocsMap { for _, col := range collections { if col.Description().Name.Value() == colName { - err := col.CreateMany(context.Background(), colDocs) + err := col.CreateMany(ctx, acpIdentity.NoIdentity, colDocs) if err != nil { return err } diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 7f58fbe0dd..58b2db083b 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -51,7 +51,7 @@ func start(ctx context.Context) (*defraInstance, error) { db, err := db.NewDB(ctx, rootstore) if err != nil { - return nil, errors.Wrap("failed to create database", err) + return nil, errors.Wrap("failed to create a database", err) } handler, err := httpapi.NewHandler(db) diff --git a/tests/integration/acp.go b/tests/integration/acp.go new file mode 100644 index 0000000000..8c4969e228 --- /dev/null +++ b/tests/integration/acp.go @@ -0,0 +1,66 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/require" +) + +// AddPolicy will attempt to add the given policy using DefraDB's ACP system. +type AddPolicy struct { + // NodeID may hold the ID (index) of the node we want to add policy to. + // + // If a value is not provided the policy will be added in all nodes. + NodeID immutable.Option[int] + + // The raw policy string. + Policy string + + // The policy creator, i.e. actor creating the policy. + Creator string + + // The expected policyID generated based on the Policy loaded in to the ACP system. + ExpectedPolicyID string + + // Any error expected from the action. Optional. 
+	//
+	// String can be a partial, and the test will pass if an error is returned that
+	// contains this string.
+	ExpectedError string
+}
+
+// addPolicyACP will attempt to add the given policy using DefraDB's ACP system.
+func addPolicyACP(
+	s *state,
+	action AddPolicy,
+) {
+	// If we expect an error, then ExpectedPolicyID should be empty.
+	if action.ExpectedError != "" && action.ExpectedPolicyID != "" {
+		require.Fail(s.t, "Expected error should not have an expected policyID with it.", s.testCase.Description)
+	}
+
+	for _, node := range getNodes(action.NodeID, s.nodes) {
+		policyResult, err := node.AddPolicy(
+			s.ctx,
+			action.Creator,
+			action.Policy,
+		)
+
+		if err == nil {
+			require.Equal(s.t, action.ExpectedError, "")
+			require.Equal(s.t, action.ExpectedPolicyID, policyResult.PolicyID)
+		}
+
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	}
+}
diff --git a/tests/integration/acp/README.md b/tests/integration/acp/README.md
new file mode 100644
index 0000000000..1de6847ce4
--- /dev/null
+++ b/tests/integration/acp/README.md
@@ -0,0 +1,20 @@
+## More Information on ACP test directories.
+
+
+1) `./defradb/tests/integration/acp/add_policy`
+   - This directory tests ONLY the `Adding of a Policy` through DefraDB.
+   - Does NOT assert the schema.
+   - Does NOT test DPI validation.
+
+2) `./defradb/tests/integration/acp/schema/add_dpi`
+   - This directory tests the loading/adding of a schema that has `@policy(id, resource)`
+   specified. The tests ensure that only a schema linking to
+   a valid DPI policy is accepted. Naturally, these tests will also be `Adding a Policy`
+   through DefraDB, like in (1), before actually adding the schema. If a schema has a
+   policy specified that doesn't exist (or wasn't added yet), that schema WILL/MUST
+   be rejected in these tests.
+   - The tests assert the schema afterwards to ensure rejection/acceptance.
+   - Tests DPI validation.
+
+
+### Learn more about [DPI Rules](/acp/README.md)
diff --git a/tests/integration/acp/add_policy/README.md b/tests/integration/acp/add_policy/README.md
new file mode 100644
index 0000000000..30b88492b9
--- /dev/null
+++ b/tests/integration/acp/add_policy/README.md
@@ -0,0 +1,20 @@
+## This directory tests the `Adding of a Policy` through DefraDB.
+
+### These are NOT DefraDB Policy Interface (DPI) Tests
+There are certain requirements for a DPI. A resource must be a valid DPI to link to a collection.
+However, it's important to note that DefraDB does allow adding policies that might not have
+DPI-compliant resources: as long as sourcehub (the acp system) deems them valid, they are allowed
+to be added. There are various reasons for this, mostly because DefraDB is a tool that can be used
+to upload policies to sourcehub that might not be intended only for use with collections / schema.
+Nonetheless, we still need a way to validate that the resource specified on the schema being added
+is a DPI-compliant resource on an already registered policy. Therefore, when a schema is being added
+and it has the policyID and resource defined using the `@policy` directive, the validation occurs
+during the adding of the schema. In other words, we do not allow a non-DPI-compliant resource to be
+specified on a schema; if one is, the schema is rejected.
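+
+For illustration only (a sketch, not something asserted by the tests in this directory), a schema
+might link a policy to a collection using the `@policy` directive like so, where the policy ID is
+hypothetical and the resource name must match a resource defined in that policy:
+
+```graphql
+type Users @policy(
+    id: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a",
+    resource: "users"
+) {
+    name: String
+    age: Int
+}
+```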
+
+### Non-DPI Compliant Policies Documented In Tests
+These test files document some cases where DefraDB would upload policies that aren't DPI compliant,
+but are sourcehub compatible; it might be worthwhile to look at the documented tests and notes there:
+- `./with_no_perms_test.go`
+- `./with_no_resources_test.go`
+- `./with_permissionless_owner_test.go`
diff --git a/tests/integration/acp/add_policy/basic_test.go b/tests/integration/acp/add_policy/basic_test.go
new file mode 100644
index 0000000000..47aa351e77
--- /dev/null
+++ b/tests/integration/acp/add_policy/basic_test.go
@@ -0,0 +1,100 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_BasicYAML_ValidPolicyID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding basic policy in YAML format",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a basic policy that satisfies minimum DPI requirements
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_BasicJSON_ValidPolicyID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding basic policy in JSON format",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    {
+                      "description": "a basic policy that satisfies minimum DPI requirements",
+                      "resources": {
+                        "users": {
+                          "permissions": {
+                            "read": {
+                              "expr": "owner"
+                            },
+                            "write": {
+                              "expr": "owner"
+                            }
+                          },
+                          "relations": {
+                            "owner": {
+                              "types": [
+                                "actor"
+                              ]
+                            }
+                          }
+                        }
+                      },
+                      "actor": {
+                        "name": "actor"
+                      }
+                    }
+                `,
+
+				ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/fixture.go b/tests/integration/acp/add_policy/fixture.go
new file mode 100644
index 0000000000..8fc2edb7cd
--- /dev/null
+++ b/tests/integration/acp/add_policy/fixture.go
@@ -0,0 +1,18 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_add_policy + +import ( + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +var actor1Signature = acpUtils.Actor1Signature +var actor2Signature = acpUtils.Actor2Signature diff --git a/tests/integration/acp/add_policy/with_empty_args_test.go b/tests/integration/acp/add_policy/with_empty_args_test.go new file mode 100644 index 0000000000..de93019a20 --- /dev/null +++ b/tests/integration/acp/add_policy/with_empty_args_test.go @@ -0,0 +1,93 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_EmptyPolicyData_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding empty policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: "", + + ExpectedError: "policy data can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_EmptyPolicyCreator_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with empty creator, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: "", + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_EmptyCreatorAndPolicyArgs_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with empty policy and empty creator, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: "", + + Policy: "", + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go new file mode 100644 index 0000000000..6606b62af5 --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go @@ -0,0 +1,62 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_ExtraPermissionsAndExtraRelations_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra permissions and relations, still valid", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + extra: + expr: joker + + relations: + owner: + types: + - actor + reader: + types: + - actor + joker: + types: + - actor + `, + + ExpectedPolicyID: "ecfeeebd1b65e6a21b2f1b57006176bcbc6a37ef238f27c7034953f46fe04674", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_perms_test.go b/tests/integration/acp/add_policy/with_extra_perms_test.go new file mode 100644 index 0000000000..0fbcc842c0 --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_perms_test.go @@ -0,0 +1,95 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_ExtraPermissions_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra permissions, still valid", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + extra: + expr: owner + + relations: + owner: + types: + - actor + + actor: + name: actor + `, + + ExpectedPolicyID: "9d518bb2d5aceb2c8f9b12b909eecd50276c1bd0250069875f265166e6030bb5", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_ExtraDuplicatePermissions_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, extra duplicate permissions, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + actor: + name: actor + `, + + ExpectedError: "key \"write\" already set in map", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_extra_relations_test.go b/tests/integration/acp/add_policy/with_extra_relations_test.go new file mode 100644 index 0000000000..b8f568b5e6 --- /dev/null +++ b/tests/integration/acp/add_policy/with_extra_relations_test.go @@ -0,0 +1,107 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_ExtraRelations_ValidPolicyID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, extra relations, still valid",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          write:
+                            expr: owner
+                          read:
+                            expr: owner + reader
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          joker:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "450c47aa47b7b07820f99e5cb38170dc108a2f12b137946e6b47d0c0a73b607f",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_ExtraDuplicateRelations_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, extra duplicate relations, return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          write:
+                            expr: owner
+                          read:
+                            expr: owner + reader
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          joker:
+                            types:
+                              - actor
+
+                          joker:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "key \"joker\" already set in map",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go
new file mode 100644
index 0000000000..1c7a29c148
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go
@@ -0,0 +1,75 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_InvalidCreatorIdentityWithValidPolicy_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with invalid creator, with valid policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: "invalid", + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy creator can not be empty", + }, + }, + } + + //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2357 + testUtils.AssertPanic(t, func() { testUtils.ExecuteTestCase(t, test) }) +} + +func TestACP_AddPolicy_InvalidCreatorIdentityWithEmptyPolicy_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, adding policy, with invalid creator, with empty policy, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: "invalid", + + Policy: "", + + ExpectedError: "policy data can not be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_invalid_relations_test.go b/tests/integration/acp/add_policy/with_invalid_relations_test.go new file mode 100644 index 0000000000..9184d69426 --- /dev/null +++ b/tests/integration/acp/add_policy/with_invalid_relations_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_NoRelations_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, no relations, should return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + `, + + ExpectedError: "resource users: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_NoRelationsLabel_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, no relations label, should return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + `, + + ExpectedError: "resource users: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go new file mode 100644 index 0000000000..3f3f3c4db3 --- /dev/null +++ b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go @@ -0,0 +1,94 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_MissingRequiredOwnerRelation_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, missing required owner relation, should return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          write:
+                            expr: reader
+                          read:
+                            expr: reader
+
+                        relations:
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedError: "resource users: resource missing owner relation: invalid policy",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_DuplicateOwnerRelation_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, duplicate required owner relations, return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          owner:
+                            types:
+                              - actor
+
+                    actor:
+                      name: actor
+                `,
+
+				ExpectedError: "key \"owner\" already set in map",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_invalid_resource_test.go b/tests/integration/acp/add_policy/with_invalid_resource_test.go
new file mode 100644
index 0000000000..499ff146d1
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_invalid_resource_test.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_OneResourceThatIsEmpty_Error(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, one resource that is empty, should return error",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                `,
+
+				ExpectedError: "resource users: resource missing owner relation: invalid policy",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_managed_relation_test.go b/tests/integration/acp/add_policy/with_managed_relation_test.go
new file mode 100644
index 0000000000..74f89e365a
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_managed_relation_test.go
@@ -0,0 +1,61 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_WithRelationManagingOtherRelation_ValidPolicyID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, where a relation is managing another relation, valid policy id", + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy with admin relation managing reader relation + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_multi_policies_test.go b/tests/integration/acp/add_policy/with_multi_policies_test.go new file mode 100644 index 0000000000..52b7333d34 --- /dev/null +++ b/tests/integration/acp/add_policy/with_multi_policies_test.go @@ -0,0 +1,351 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add multiple different policies", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: another policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyIDs(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add multiple different policies in different formats", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + { + "description": "a policy", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + } + } + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: another policy + + actor: + name: actor + + resources: + users: + permissions: + read: + 
expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testing.T) { + const policyUsedByBoth string = ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + ` + + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies by different actors, valid", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: policyUsedByBoth, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Creator: actor2Signature, + + Policy: policyUsedByBoth, + + ExpectedPolicyID: "551c57323f33decfdc23312e5e1036e3ab85d2414e962814dab9101619dd9ff9", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies, error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedError: "policy dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a: policy exists", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate policies different formats, error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a", + }, + + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + { + "description": "a policy", + "actor": { + "name": "actor" + }, + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + } + } + `, + + ExpectedError: "policy dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a: policy exists", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_multiple_resources_test.go b/tests/integration/acp/add_policy/with_multiple_resources_test.go new file mode 100644 index 0000000000..c939600663 --- /dev/null +++ b/tests/integration/acp/add_policy/with_multiple_resources_test.go @@ -0,0 +1,173 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is 
governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_MultipleResources_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "cf082c11fa812dddaa5093f0ccae66c2b5294efe0a2b50ffdcbc0185adf6adf1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_MultipleResourcesUsingRelationDefinedInOther_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources using other's relation, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + `, + + ExpectedError: "resource books missing relation reader", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_SecondResourcesMissingRequiredOwner_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, multiple resources second missing required owner, return error", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + books: + permissions: + write: + expr: owner + read: + expr: owner + reader + + relations: + reader: + types: + - actor + `, + + ExpectedError: "resource books: resource missing owner relation: invalid policy", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_no_perms_test.go b/tests/integration/acp/add_policy/with_no_perms_test.go new file mode 100644 index 0000000000..0f55851468 --- /dev/null +++ b/tests/integration/acp/add_policy/with_no_perms_test.go @@ -0,0 +1,163 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Note: Even though this file shows we can load a policy that has no permissions, it is important
+// to know that DPI always has a set of permissions it requires. Therefore, when a schema is loaded
+// and it has a policyID and resource defined on the collection, the validation occurs before we
+// accept that schema.
+// In other words, we do not allow a non-DPI-compliant policy to be specified on a collection schema;
+// if one is, the schema is rejected. However, we register the policy with acp even if
+// the policy is not DPI compliant.
+
+func TestACP_AddPolicy_NoPermissionsOnlyOwner_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions only owner relation",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "b6edfd9d24a79067a2f5960e1369499ebaf4c5ec6747e2f444f33bf9c3915fcb",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsMultiRelations_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions with multi relations",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsLabelOnlyOwner_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions label only owner relation",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "b6edfd9d24a79067a2f5960e1369499ebaf4c5ec6747e2f444f33bf9c3915fcb",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_NoPermissionsLabelMultiRelations_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no permissions label with multi relations",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+
+                `,
+
+				ExpectedPolicyID: "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_no_resources_test.go b/tests/integration/acp/add_policy/with_no_resources_test.go
new file mode 100644
index 0000000000..861a77c64e
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_no_resources_test.go
@@ -0,0 +1,92 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// such a policy is still a valid sourcehub policy for now.
+func TestACP_AddPolicy_NoResource_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no resource, valid policy",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                `,
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// such a policy is still a valid sourcehub policy for now.
+func TestACP_AddPolicy_NoResourceLabel_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy, no resource label, valid policy",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+                `,
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Even though empty resources make no sense from a DefraDB (DPI) perspective,
+// such a policy is still a valid sourcehub policy for now.
+func TestACP_AddPolicy_PolicyWithOnlySpace_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, adding a policy that has only space",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: " ",
+
+				ExpectedPolicyID: "b72d8ec56ffb141922781d2b1b0803404bef57be0eeec98f1662f3017fc2de35",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_perm_expr_test.go b/tests/integration/acp/add_policy/with_perm_expr_test.go
new file mode 100644
index 0000000000..7af2c8e690
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_perm_expr_test.go
@@ -0,0 +1,98 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinus_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with minus, ValidID",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader - owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "d74384d99b6732c3a6e0e47c7b75ea19553f643bcca416380530d8ad4e50e529",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+// Note: this test and the one above result in different policy IDs.
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinusNoSpace_ValidID(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add policy with permission expr having owner in the end with minus no space, ValidID",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader-owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: "f6d5d6d8b0183230fcbdf06cfe14b611f782752d276006ad4622231eeaf60820",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
}
diff --git a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
new file mode 100644
index 0000000000..c61a6e0b9c
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go
@@ -0,0 +1,137 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+    "testing"
+
+    testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AddPolicy_EmptyExpressionInPermission_Error(t *testing.T) {
+    test := testUtils.TestCase{
+
+        Description: "Test acp, add policy with permission having empty expr, error",
+
+        Actions: []any{
+            testUtils.AddPolicy{
+                Creator: actor1Signature,
+
+                Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr:
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+                ExpectedError: "relation read: error parsing: expression needs: term",
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithIncorrectSymbol_Error(t *testing.T) {
+    test := testUtils.TestCase{
+
+        Description: "Test acp, add policy with permission expr having owner in the end with incorrect symbol, error",
+
+        Actions: []any{
+            testUtils.AddPolicy{
+                Creator: actor1Signature,
+
+                Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader ^ owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+                ExpectedError: "error parsing expression reader ^ owner: unknown token:",
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithIncorrectSymbolNoSpace_Error(t *testing.T) {
+    test := testUtils.TestCase{
+
+        Description: "Test acp, add policy with permission expr having owner in the end with incorrect symbol with no space, error",
+
+        Actions: []any{
+            testUtils.AddPolicy{
+                Creator: actor1Signature,
+
+                Policy: `
+                    description: a policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: reader^owner
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                `,
+
+                ExpectedError: "error parsing expression reader^owner: unknown token:",
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/add_policy/with_permissionless_owner_test.go b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
new file mode 100644
index 0000000000..6496a1dec3
--- /dev/null
+++ b/tests/integration/acp/add_policy/with_permissionless_owner_test.go
@@ -0,0 +1,144 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_add_policy
+
+import (
+    "testing"
+
+    testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+// Note: Similar to the one in ./with_no_perms_test.go
+// Even though this file shows we can load a policy that assigns none of the read/write
+// permissions required for DPI, validation occurs before we accept a schema that has a
+// policyID and resource defined on the collection. In other words, we do not allow a
+// non-DPI-compliant policy to be specified on a collection schema; if one is, the schema
+// is rejected. However, we register the policy with ACP even if it isn't DPI compliant.
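+//
+// For reference, a minimal DPI-compliant resource is assumed to look roughly
+// like the following (read and write permissions whose expressions contain
+// the owner relation):
+//
+//	resources:
+//	  users:
+//	    permissions:
+//	      read:
+//	        expr: owner
+//	      write:
+//	        expr: owner
+//	    relations:
+//	      owner:
+//	        types:
+//	          - actor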
+ +func TestACP_AddPolicy_PermissionlessOwnerWrite_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no write permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: reader + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "af1ee9ffe8558da8455dc1cfc5897028c16c038a053b4cf740dfcef8032d944a", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_PermissionlessOwnerRead_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no read permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + reader + read: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "3ceb4a4be889998496355604b68836bc280dc26dab829af3ec45b63d7767a7f1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddPolicy_PermissionlessOwnerReadWrite_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy with owner having no read/write permissions, valid ID", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: reader + read: + expr: owner + reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: "af1ee9ffe8558da8455dc1cfc5897028c16c038a053b4cf740dfcef8032d944a", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/add_policy/with_unused_relations_test.go b/tests/integration/acp/add_policy/with_unused_relations_test.go new file mode 100644 index 0000000000..faa7658e5e --- /dev/null +++ b/tests/integration/acp/add_policy/with_unused_relations_test.go @@ -0,0 +1,58 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
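+
+// The test below documents that a relation which is declared but never used
+// in any permission expression does not invalidate the policy; registration
+// still succeeds.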
+ +package test_acp_add_policy + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddPolicy_UnusedRelation_ValidID(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add policy, unused relation in permissions", + + Actions: []any{ + testUtils.AddPolicy{ + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + `, + + ExpectedPolicyID: "e1bb7702f653d4f9a0595d2d97c209fc0da8f315be007bd19545599eed41ae42", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/fixture.go b/tests/integration/acp/fixture.go new file mode 100644 index 0000000000..ea0ccfc09d --- /dev/null +++ b/tests/integration/acp/fixture.go @@ -0,0 +1,14 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +var Actor1Signature = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" +var Actor2Signature = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" diff --git a/tests/integration/acp/index/create_test.go b/tests/integration/acp/index/create_test.go new file mode 100644 index 0000000000..4a1d4c6fac --- /dev/null +++ b/tests/integration/acp/index/create_test.go @@ -0,0 +1,174 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_index
+
+import (
+    "testing"
+
+    testUtils "github.com/sourcenetwork/defradb/tests/integration"
+    acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+// This test documents that we don't allow creating indexes on collections that have a policy
+// until the following is implemented:
+// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2365
+func TestACP_IndexCreateWithSeparateRequest_OnCollectionWithPolicy_ReturnError(t *testing.T) {
+    test := testUtils.TestCase{
+        Description: "Test acp, with creating new index using separate request on permissioned collection, error",
+        Actions: []any{
+
+            testUtils.AddPolicy{
+
+                Creator: acpUtils.Actor1Signature,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String
+                        age: Int
+                    }
+                `,
+            },
+
+            testUtils.CreateIndex{
+                CollectionID: 0,
+
+                IndexName: "some_index",
+
+                FieldName: "name",
+
+                ExpectedError: "can not create index on a collection with a policy",
+            },
+
+            testUtils.Request{
+                Request: `
+                    query {
+                        Users {
+                            name
+                            age
+                        }
+                    }`,
+
+                Results: []map[string]any{},
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
+
+// This test documents that we don't allow creating indexes on collections that have a policy
+// until the following is implemented:
+// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2365
+func TestACP_IndexCreateWithDirective_OnCollectionWithPolicy_ReturnError(t *testing.T) {
+    test := testUtils.TestCase{
+        Description: "Test acp, with creating new index using directive on permissioned collection, error",
+        Actions: []any{
+
+            testUtils.AddPolicy{
+
+                Creator: acpUtils.Actor1Signature,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String @index
+                        age: Int
+                    }
+                `,
+
+                ExpectedError: "can not create index on a collection with a policy",
+            },
+
+            testUtils.Request{
+                Request: `
+                    query {
+                        Users {
+                            name
+                            age
+                        }
+                    }`,
+
+                ExpectedError: `Cannot query field "Users" on type "Query"`,
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/p2p/replicator_test.go b/tests/integration/acp/p2p/replicator_test.go
new file mode 100644
index 0000000000..8a581347e1
--- /dev/null
+++ b/tests/integration/acp/p2p/replicator_test.go
@@ -0,0 +1,89 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_p2p
+
+import (
+    "testing"
+
+    testUtils "github.com/sourcenetwork/defradb/tests/integration"
+    acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+// This test documents that we don't allow setting a replicator with collections that have a policy
+// until the following is implemented:
+// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366
+func TestACP_P2POneToOneReplicatorWithPermissionedCollection_Error(t *testing.T) {
+    test := testUtils.TestCase{
+
+        Description: "Test acp, with p2p replicator with permissioned collection, error",
+
+        Actions: []any{
+
+            testUtils.RandomNetworkingConfig(),
+            testUtils.RandomNetworkingConfig(),
+
+            testUtils.AddPolicy{
+
+                Creator: acpUtils.Actor1Signature,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String
+                        age: Int
+                    }
+                `,
+            },
+
+            testUtils.ConfigureReplicator{
+                SourceNodeID:  0,
+                TargetNodeID:  1,
+                ExpectedError: "replicator can not use all collections, as some have policy",
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/p2p/subscribe_test.go b/tests/integration/acp/p2p/subscribe_test.go
new file mode 100644
index 0000000000..baf4c7cd0d
--- /dev/null
+++ b/tests/integration/acp/p2p/subscribe_test.go
@@ -0,0 +1,99 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_p2p + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +// This test documents that we don't allow subscribing to a collection that has a policy +// until the following is implemented: +// TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 +func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_Error(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, with p2p subscribe with permissioned collection, error", + + Actions: []any{ + + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Creator: acpUtils.Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + ExpectedError: "p2p collection specified has a policy on it", + }, + + testUtils.GetAllP2PCollections{ + NodeID: 1, + ExpectedCollectionIDs: []int{}, // Note: Empty + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/register_and_delete_test.go b/tests/integration/acp/register_and_delete_test.go new file mode 100644 index 0000000000..36a2892677 --- /dev/null +++ b/tests/integration/acp/register_and_delete_test.go @@ -0,0 +1,514 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndDeleteWithoutIdentity_CanDelete(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). 
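+    // The behaviour documented throughout this file assumes that when no
+    // identity is supplied at creation time, no owner is registered with ACP
+    // for the document, so later reads and deletes are not restricted.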
+ + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and delete without identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + DocID: 0, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). + + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and delete with identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: Actor1Signature, + + DocID: 0, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. 
+ OwnerIdentity := Actor1Signature + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and delete with identity, can delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + DocID: 0, + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Signature + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and delete without identity, can not delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. 
+    OwnerIdentity := Actor1Signature
+
+    WrongIdentity := Actor2Signature
+
+    test := testUtils.TestCase{
+
+        Description: "Test acp, create with identity, and delete with wrong identity, can not delete",
+
+        Actions: []any{
+            testUtils.AddPolicy{
+
+                Creator: OwnerIdentity,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String
+                        age: Int
+                    }
+                `,
+            },
+
+            testUtils.CreateDoc{
+                CollectionID: 0,
+
+                Identity: OwnerIdentity,
+
+                Doc: `
+                    {
+                        "name": "Shahzad",
+                        "age": 28
+                    }
+                `,
+            },
+
+            testUtils.DeleteDoc{
+                CollectionID: 0,
+
+                Identity: WrongIdentity,
+
+                DocID: 0,
+
+                ExpectedError: "document not found or not authorized to access",
+            },
+
+            testUtils.Request{
+                Identity: OwnerIdentity,
+
+                Request: `
+                    query {
+                        Users {
+                            _docID
+                            name
+                            age
+                        }
+                    }
+                `,
+                Results: []map[string]any{
+                    {
+                        "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+                        "name":   "Shahzad",
+                        "age":    int64(28),
+                    },
+                },
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/register_and_read_test.go b/tests/integration/acp/register_and_read_test.go
new file mode 100644
index 0000000000..d01a8835ea
--- /dev/null
+++ b/tests/integration/acp/register_and_read_test.go
@@ -0,0 +1,457 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
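+
+// The tests in this file cover the create/read identity matrix: a document
+// created without an identity is readable with or without one, while a
+// document created with an identity is readable only by that same identity;
+// a missing or wrong identity sees empty results rather than an error.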
+ +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndReadWithoutIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and read without identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and read with identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: Actor1Signature, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and read with identity, can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: 
"53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: Actor1Signature, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: Actor1Signature, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and read without identity, can not read", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: Actor1Signature, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and read without identity, can not read", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: Actor1Signature, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: Actor2Signature, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/register_and_update_test.go b/tests/integration/acp/register_and_update_test.go new file mode 100644 index 0000000000..1afa89cf86 --- /dev/null +++ b/tests/integration/acp/register_and_update_test.go @@ -0,0 +1,810 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_CreateWithoutIdentityAndUpdateWithoutIdentity_CanUpdate(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). + + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and update without identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { + // The same identity that is used to do the registering/creation should be used in the + // final read check to see the state of that registered document. + // Note: In this test that identity is empty (no identity). 
+ + test := testUtils.TestCase{ + + Description: "Test acp, create without identity, and update with identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: Actor1Signature, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Signature + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and update with identity, can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. 
+    OwnerIdentity := Actor1Signature
+
+    test := testUtils.TestCase{
+
+        Description: "Test acp, create with identity, and update without identity, can not update",
+
+        SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+            // GQL mutation will return no error when wrong identity is used so test that separately.
+            testUtils.CollectionNamedMutationType,
+            testUtils.CollectionSaveMutationType,
+        }),
+
+        Actions: []any{
+            testUtils.AddPolicy{
+
+                Creator: OwnerIdentity,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String
+                        age: Int
+                    }
+                `,
+            },
+
+            testUtils.CreateDoc{
+                CollectionID: 0,
+
+                Identity: OwnerIdentity,
+
+                Doc: `
+                    {
+                        "name": "Shahzad",
+                        "age": 28
+                    }
+                `,
+            },
+
+            testUtils.UpdateDoc{
+                CollectionID: 0,
+
+                DocID: 0,
+
+                Doc: `
+                    {
+                        "name": "Shahzad Lone"
+                    }
+                `,
+
+                ExpectedError: "document not found or not authorized to access",
+            },
+
+            testUtils.Request{
+                Identity: OwnerIdentity,
+
+                Request: `
+                    query {
+                        Users {
+                            _docID
+                            name
+                            age
+                        }
+                    }
+                `,
+                Results: []map[string]any{
+                    {
+                        "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+                        "name":   "Shahzad",
+                        "age":    int64(28),
+                    },
+                },
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testing.T) {
+    // OwnerIdentity should be the same identity that is used to do the registering/creation,
+    // and the final read check to see the state of that registered document.
+    OwnerIdentity := Actor1Signature
+
+    WrongIdentity := Actor2Signature
+
+    test := testUtils.TestCase{
+
+        Description: "Test acp, create with identity, and update with wrong identity, can not update",
+
+        SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+            // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: WrongIdentity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This separate GQL test should be merged with the ones above when all the clients are fixed +// to behave the same in: https://github.com/sourcenetwork/defradb/issues/2410 +func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testing.T) { + // OwnerIdentity should be the same identity that is used to do the registering/creation, + // and the final read check to see the state of that registered document. + OwnerIdentity := Actor1Signature + + test := testUtils.TestCase{ + + Description: "Test acp, create with identity, and update without identity (gql), can not update", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+            testUtils.GQLRequestMutationType,
+        }),
+
+        Actions: []any{
+            testUtils.AddPolicy{
+
+                Creator: OwnerIdentity,
+
+                Policy: `
+                    description: a test policy which marks a collection in a database as a resource
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+                ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+            },
+
+            testUtils.SchemaUpdate{
+                Schema: `
+                    type Users @policy(
+                        id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001",
+                        resource: "users"
+                    ) {
+                        name: String
+                        age: Int
+                    }
+                `,
+            },
+
+            testUtils.CreateDoc{
+                CollectionID: 0,
+
+                Identity: OwnerIdentity,
+
+                Doc: `
+                    {
+                        "name": "Shahzad",
+                        "age": 28
+                    }
+                `,
+            },
+
+            testUtils.UpdateDoc{
+                CollectionID: 0,
+
+                DocID: 0,
+
+                Doc: `
+                    {
+                        "name": "Shahzad Lone"
+                    }
+                `,
+            },
+
+            testUtils.Request{
+                Identity: OwnerIdentity,
+
+                Request: `
+                    query {
+                        Users {
+                            _docID
+                            name
+                            age
+                        }
+                    }
+                `,
+                Results: []map[string]any{
+                    {
+                        "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36",
+                        "name":   "Shahzad",
+                        "age":    int64(28),
+                    },
+                },
+            },
+        },
+    }
+
+    testUtils.ExecuteTestCase(t, test)
+}
+
+// This separate GQL test should be merged with the ones above when all the clients are fixed
+// to behave the same in: https://github.com/sourcenetwork/defradb/issues/2410
+func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *testing.T) {
+    // OwnerIdentity should be the same identity that is used to do the registering/creation,
+    // and the final read check to see the state of that registered document.
+    OwnerIdentity := Actor1Signature
+
+    WrongIdentity := Actor2Signature
+
+    test := testUtils.TestCase{
+
+        Description: "Test acp, create with identity, and update with wrong identity (gql), can not update",
+
+        SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+            // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: OwnerIdentity, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 0, + + Identity: OwnerIdentity, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: WrongIdentity, + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: OwnerIdentity, + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + Results: []map[string]any{ + { + "_docID": "bae-1e608f7d-b01e-5dd5-ad4a-9c6cc3005a36", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/README.md b/tests/integration/acp/schema/add_dpi/README.md new file mode 100644 index 0000000000..4bb0b065c9 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/README.md @@ -0,0 +1,7 @@ +## Accept vs Reject: +- All tests are broken into `accept_*_test.go` and `reject_*_test.go` files. +- Accepted tests are with valid DPIs (hence schema is accepted). +- Rejected tests are with invalid DPIs (hence schema is rejected). +- There are also some Partially-DPI tests that are both accepted and rejected depending on the resource. + +Learn more about the DefraDB Policy Interface [DPI](/acp/README.md) diff --git a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go new file mode 100644 index 0000000000..2e08739176 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go @@ -0,0 +1,214 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
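+
+// The two tests below submit what is assumed to be the same minimal
+// DPI-compliant policy, once as YAML and once as JSON; both are expected to
+// yield the same policy ID, since the two formats encode identical content.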
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_BasicYAML_SchemaAccepted(t *testing.T) { + policyIDOfValidDPI := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, specify basic policy that was added in YAML format, accept schema", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a basic policy that satisfies minimum DPI requirements + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_BasicJSON_SchemaAccepted(t *testing.T) { + policyIDOfValidDPI := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, specify basic policy that was added in JSON format, accept schema", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + { + "description": "a basic policy that satisfies minimum DPI requirements", + "resources": { + "users": { + "permissions": { + "read": { + "expr": "owner" + }, + "write": { + "expr": "owner" + } + }, + "relations": { + "owner": { + "types": [ + "actor" + ] + } + } + } + }, + "actor": { + "name": "actor" + } + } + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go new file mode 100644 index 0000000000..5e0bdf5b3c --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go @@ -0,0 +1,316 @@ +// 
Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelation_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "16e39e650d4cbd5161ae0c572edad6f7e2950c1c4afa37e427af3c8708e68f0f" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having required relation, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + reader + magic: + expr: owner - reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelationInTheEnd_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "35b6f3db54cfb0f451a4faba77d2c71d8718215caeb5a15a8570dfdba07b694d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having required relation in the end, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + magic: + expr: reader & owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + 
"name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithExtraPermsHavingNoRequiredRelation_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "7b6266a93bfb6920bf57884f55c3823a5a5147c4ce445a9fc703b7c1e59b2d12" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with extra permissions having no required relation, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + magic: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go new file mode 100644 index 0000000000..01c674426e --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go @@ -0,0 +1,121 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp_schema_add_dpi
+
+import (
+	"fmt"
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema"
+)
+
+func TestACP_AddDPISchema_WithManagedRelation_AcceptSchemas(t *testing.T) {
+	policyIDOfValidDPI := "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, add dpi schema, with a managed relation on the DPI, schema accepted",
+
+		Actions: []any{
+
+			testUtils.AddPolicy{
+
+				Creator: actor1Signature,
+
+				Policy: `
+                    description: A Valid Defra Policy Interface (DPI)
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader
+                          write:
+                            expr: owner
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+                          reader:
+                            types:
+                              - actor
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: policyIDOfValidDPI,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+					type Users @policy(
+						id: "%s",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+					policyIDOfValidDPI,
+				),
+			},
+
+			testUtils.IntrospectionRequest{
+				Request: `
+					query {
+						__type (name: "Users") {
+							name
+							fields {
+								name
+								type {
+									name
+									kind
+								}
+							}
+						}
+					}
+				`,
+				ExpectedData: map[string]any{
+					"__type": map[string]any{
+						"name": "Users", // NOTE: "Users" MUST exist
+						"fields": schemaUtils.DefaultFields.Append(
+							schemaUtils.Field{
+								"name": "name",
+								"type": map[string]any{
+									"kind": "SCALAR",
+									"name": "String",
+								},
+							},
+						).Append(
+							schemaUtils.Field{
+								"name": "age",
+								"type": map[string]any{
+									"kind": "SCALAR",
+									"name": "Int",
+								},
+							},
+						).Tidy(),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
new file mode 100644
index 0000000000..666f4264ee
--- /dev/null
+++ b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go
@@ -0,0 +1,131 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_PartialValidDPIButUseOnlyValidDPIResource_AcceptSchema(t *testing.T) { + policyIDOfPartiallyValidDPI := "d5d411825b2d8fa5a550f1e34153b88b375ed9c9af19ce6d2ba1769e237a45d0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, has both valid & invalid resources, but use only valid resource, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Partially Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + usersValid: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + usersInvalid: + permissions: + read: + expr: reader - owner + write: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfPartiallyValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "usersValid" + ) { + name: String + age: Int + } + `, + policyIDOfPartiallyValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go new file mode 100644 index 0000000000..19181ec1a1 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go @@ -0,0 +1,183 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
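
The duplicate-DPI test that follows registers byte-for-byte identical policy text under two different creators and expects two distinct policy IDs, so the ID evidently binds the registering creator along with the policy body. The exact derivation is not shown anywhere in this patch; the snippet below is only a plausible sketch of why the two 64-hex-character IDs differ:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// policyID is an assumption: the real derivation is not part of this patch.
// It only illustrates a creator-salted hash, which is consistent with
// identical policy text yielding a different ID per creator.
func policyID(creator, policyText string) string {
	sum := sha256.Sum256([]byte(creator + "\n" + policyText))
	return hex.EncodeToString(sum[:])
}

func main() {
	const policy = "description: A Valid Defra Policy Interface (DPI) ..."
	fmt.Println(policyID("actor1", policy) == policyID("actor2", policy)) // false
}
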
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t *testing.T) { + const sameResourceNameOnBothDPI string = "users" + const validDPIUsedByBoth string = ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + ` + + const policyIDOfFirstCreatorsDPI string = "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + const policyIDOfSecondCreatorsDPI string = "d33aa07a28ea19ed07a5256eb7e7f5600b0e0af13254889a7fce60202c4f6c7e" + + test := testUtils.TestCase{ + + Description: "Test acp, add duplicate DPIs by different actors, accept both schemas", + + Actions: []any{ + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: validDPIUsedByBoth, + + ExpectedPolicyID: policyIDOfFirstCreatorsDPI, + }, + + testUtils.AddPolicy{ + + Creator: actor2Signature, + + Policy: validDPIUsedByBoth, + + ExpectedPolicyID: policyIDOfSecondCreatorsDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type OldUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfFirstCreatorsDPI, + sameResourceNameOnBothDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "OldUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "OldUsers", // NOTE: "OldUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type NewUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfSecondCreatorsDPI, + sameResourceNameOnBothDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "NewUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "NewUsers", // NOTE: "NewUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go new file mode 100644 index 0000000000..d8c259d57c --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go @@ -0,0 +1,281 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_WithMultipleResources_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "f3e521de628fa607ba11af0e9b53e2fb74ca0e6ea33622003d1f43dbae0ce41d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with multiple resources, schema accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + books: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WithMultipleResourcesBothBeingUsed_AcceptSchema(t *testing.T) { + policyIDOfValidDPI := "f3e521de628fa607ba11af0e9b53e2fb74ca0e6ea33622003d1f43dbae0ce41d" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with multiple resources both being used, schemas accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + books: + permissions: + read: + expr: owner + write: + expr: owner + + relations: + owner: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", // NOTE: "Users" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": 
"Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Books @policy( + id: "%s", + resource: "books" + ) { + name: String + } + `, + policyIDOfValidDPI, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Books") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Books", // NOTE: "Books" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go new file mode 100644 index 0000000000..89aab30d6f --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go @@ -0,0 +1,172 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + schemaUtils "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestACP_AddDPISchema_UseSameResourceOnDifferentSchemas_AcceptSchemas(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + sharedSameResourceName := "users" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, where one resource is specified on different schemas, schemas accepted", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type OldUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + sharedSameResourceName, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "OldUsers") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "OldUsers", // NOTE: "OldUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type NewUsers @policy( + id: "%s", + resource: "%s" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + sharedSameResourceName, + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "NewUsers") { + name + fields { + name + type { + name + kind + } + } + 
} + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "NewUsers", // NOTE: "NewUsers" MUST exist + "fields": schemaUtils.DefaultFields.Append( + schemaUtils.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schemaUtils.Field{ + "name": "age", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Int", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/fixture.go b/tests/integration/acp/schema/add_dpi/fixture.go new file mode 100644 index 0000000000..0202cddeaa --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/fixture.go @@ -0,0 +1,18 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +var actor1Signature = acpUtils.Actor1Signature +var actor2Signature = acpUtils.Actor2Signature diff --git a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go new file mode 100644 index 0000000000..ea4bd2476f --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go @@ -0,0 +1,165 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoArgWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no arg was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy { + name: String + age: Int + } + `, + ExpectedError: "missing policy arguments, must have both id and resource", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedArgsAreEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified args on schema are empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "", id: "") { + name: String + age: Int + } + `, + ExpectedError: "missing policy arguments, must have both id and resource", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go new file mode 100644 index 0000000000..a94930424e --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go @@ -0,0 +1,169 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_InvalidPolicyIDArgTypeWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but invalid policyID arg type was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(id: 123 , resource: "users") { + name: String + age: Int + } + `, + ExpectedError: "policy directive with invalid id property", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_InvalidResourceArgTypeWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but invalid resource arg type was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s" , resource: 123) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + + ExpectedError: "policy directive with invalid resource property", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go new file mode 100644 index 0000000000..05dc4b8b9c --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go @@ -0,0 +1,438 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "782ffee730033ff01a3bdb05a3aa130f08c0914887378b0dfee314be6c3a8dd0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required read permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + read: + expr: r + + relations: + owner: + types: + - actor + r: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
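+					// Note that the AddPolicy step above still succeeds with its
+					// expected ID: the policy is valid ACP, it only fails DPI
+					// validation once the schema tries to link to it, which is
+					// why no "Users" type exists here.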
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionLabelOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "62d2d65d0304cb9a16bb4f07d1f48c7142911f73bc1db6ee54cdd2c6c7949c73" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required read permission label, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "read", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "f9fe33e8b2ee18a65d16bdc8017fe829ec13b0797330422639cd9dafac7b00f8" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: reader + owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionNoSpaceExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "08cc6bed6b9695dd47b6bf1e934ff91975db598631a55c26db9ead1393a77588" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on read permission expression (no space), reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: reader+owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "fff5c6fc25fbc2a9e5a7251c19b1cb950889281d656e5aeb642ce7c16f181c9b" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, malicious owner specified on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: ownerBad + write: + expr: owner + + relations: + owner: + types: + - actor + ownerBad: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "B", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go new file mode 100644 index 0000000000..4c16683551 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go @@ -0,0 +1,273 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "c9bb1811862ded3a4a8a931054bd99ecabde3f41231c6aa2c50e1f1f5af2b5e8" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with difference (-) set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner - reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "7bff1d8a967df4de99f8daaa2567c660eb6e7b2c554c9a49bf831230e5d9eba6" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with intersection (&) set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner & reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "&", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnReadPermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "cc2fab7c299e94e2bd9370708d26ca1262ff3b0d75f9a58d1086658cfec26c65" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with invalid set operation on read permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner - owner + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "read", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go new file mode 100644 index 0000000000..def7044bf8 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go @@ -0,0 +1,438 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "4256d2b54767cafd0e0a2b39a6faebf44bc99a7fc74ff5b51894f7accf2ef638" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required write permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + write: + expr: w + read: + expr: owner + + relations: + owner: + types: + - actor + w: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. 
Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionLabelOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "e8be944571cd6b52faa1e8b75fa339a9f60065b65d78ed126d037722e2512593" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with owner missing required write permission label, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "write", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "34ff30cb9e80993e2b11f86f85c6daa7cd9bf25724e4d5ff0704518d7970d074" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: writer + owner + + relations: + owner: + types: + - actor + writer: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionNoSpaceExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "2e9fc5805b0442e856e9893fea0f4759d333e442856a230ed741b88670e6426c" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner specified incorrectly on write permission expression (no space), reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: writer+owner + + relations: + owner: + types: + - actor + writer: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "write", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "3bcd650ac1e69d5efe6c930d05420231a0a69e6018d0f1015e0ecef9869d8dd5" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, malicious owner specified on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: ownerBad + + relations: + owner: + types: + - actor + ownerBad: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "B", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go new file mode 100644 index 0000000000..56712afd9f --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go @@ -0,0 +1,273 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "2e14b379df6008ba577a11ac47d59c09eb0146afc5453e1ac0f40178ac3f5720" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with difference (-) set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner - reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "143546c4da209d67466690bf749899c37cd956f64c128ea7cca0662688f832ac" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with intersection (&) set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner & reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "&", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnWritePermissionExprOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "b9b4e941be904b0472ab6031628ce08ae4f87314e68972a6cfc114ed449820a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, owner relation with invalid set operation on write permission expression, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: a policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + write: + expr: owner - owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission has invalid character after relation. Permission: %s, Relation: %s, Character: %s", + "write", + "owner", + "-", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go new file mode 100644 index 0000000000..dc513f9827 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go @@ -0,0 +1,149 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_WhereNoPolicyWasAdded_SchemaRejected(t *testing.T) { + nonExistingPolicyID := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no policy was added, reject schema", + + Actions: []any{ + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + nonExistingPolicyID, + ), + + ExpectedError: "policyID specified does not exist with acp", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
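
The two tests that follow reject a link to a policy ID that was never registered, and the later reject_missing_perms and reject_missing_resource_on_dpi tests reject links whose resource or required permissions are absent. Read together they suggest a validation order roughly like the sketch below; the types and registry map are illustrative assumptions, and only the error strings are taken verbatim from the tests:

package main

import (
	"errors"
	"fmt"
)

// Resource and Policy are illustrative stand-ins for whatever the ACP
// module actually stores.
type Resource struct {
	Permissions map[string]string // permission name -> expression
}

type Policy struct {
	Resources map[string]Resource
}

// validateDPILink sketches the lookup order implied by the reject tests:
// the policy must exist, the named resource must exist on it, and the
// resource must define both required permissions.
func validateDPILink(registered map[string]Policy, policyID, resourceName string) error {
	policy, ok := registered[policyID]
	if !ok {
		return errors.New("policyID specified does not exist with acp")
	}
	resource, ok := policy.Resources[resourceName]
	if !ok {
		return errors.New("resource does not exist on the specified policy")
	}
	for _, required := range []string{"read", "write"} {
		if _, ok := resource.Permissions[required]; !ok {
			return fmt.Errorf(
				"resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s",
				policyID, resourceName, required,
			)
		}
	}
	return nil
}
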
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_WhereAPolicyWasAddedButLinkedPolicyWasNotAdded_SchemaRejected(t *testing.T) { + policyAdded := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + incorrectPolicyID := "dfe202ffb4f0fe9b46157c313213a3839e08a6f0a7c3aba55e4724cb49ffde8a" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but specify incorrect policy ID, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyAdded, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + incorrectPolicyID, + ), + + ExpectedError: "policyID specified does not exist with acp", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go new file mode 100644 index 0000000000..5b72fcea96 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go @@ -0,0 +1,165 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoPolicyIDWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no policyID was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "users") { + name: String + age: Int + } + `, + ExpectedError: "policyID must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedPolicyIDArgIsEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified policyID arg on schema is empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy(resource: "users", id: "") { + name: String + age: Int + } + `, + ExpectedError: "policyID must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go new file mode 100644 index 0000000000..1a9b1635bc --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go @@ -0,0 +1,97 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_MissingRequiredReadPermissionOnDPI_SchemaRejected(t *testing.T) { + policyIDOfInvalidDPI := "7eb7448daa631cfe33da3a149f5eea716026f54bf23ce1315c594259382c5c57" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, with missing required read permission, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A policy + + actor: + name: actor + + resources: + users: + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfInvalidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + policyIDOfInvalidDPI, + ), + + ExpectedError: fmt.Sprintf( + "resource is missing required permission on policy. PolicyID: %s, ResourceName: %s, Permission: %s", + policyIDOfInvalidDPI, + "users", + "read", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go new file mode 100644 index 0000000000..a8296f80ca --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go @@ -0,0 +1,170 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_NoResourceWasSpecifiedOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but no resource was specified on schema, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s") { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + ExpectedError: "resource name must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDPISchema_SpecifiedResourceArgIsEmptyOnSchema_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, specified resource arg on schema is empty, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy(id: "%s", resource: "") { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + ExpectedError: "resource name must not be empty", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go new file mode 100644 index 0000000000..edcbe9136a --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_SpecifiedResourceDoesNotExistOnDPI_SchemaRejected(t *testing.T) { + policyIDOfValidDPI := "4f13c5084c3d0e1e5c5db702fceef84c3b6ab948949ca8e27fcaad3fb8bc39f4" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, but specified resource does not exist on DPI, reject schema", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "doesntExist" + ) { + name: String + age: Int + } + `, + policyIDOfValidDPI, + ), + + ExpectedError: "resource does not exist on the specified policy", + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go new file mode 100644 index 0000000000..b99c254497 --- /dev/null +++ b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go @@ -0,0 +1,117 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
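Taken together, the rejection tests in this group pin down the @policy directive's contract: the `id` argument must be non-empty and name a policy that exists, the `resource` argument must be non-empty and name a resource defined on that policy, and that resource must declare owner-rooted `read` and `write` permissions. A minimal sketch of the accept path these tests bracket, assuming the actor1Signature helper used above plus stand-in validDPIPolicy and policyIDOfValidDPI values (the stand-ins are assumptions, not fixtures from this change):

func TestACP_AddDPISchema_ValidPolicyAndResource_SchemaAccepted(t *testing.T) {
	// validDPIPolicy and policyIDOfValidDPI are assumed stand-ins for the
	// valid DPI policy YAML and its ID used throughout the tests above.
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.AddPolicy{
				Creator:          actor1Signature,
				Policy:           validDPIPolicy,
				ExpectedPolicyID: policyIDOfValidDPI,
			},
			testUtils.SchemaUpdate{
				// Non-empty id of an existing policy, and a non-empty
				// resource that exists on it with read/write permissions.
				Schema: fmt.Sprintf(`
					type Users @policy(id: "%s", resource: "users") {
						name: String
						age: Int
					}
				`, policyIDOfValidDPI),
				// No ExpectedError: the schema should be accepted.
			},
		},
	}

	testUtils.ExecuteTestCase(t, test)
}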
+ +package test_acp_schema_add_dpi + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDPISchema_PartialValidDPIButUseInValidDPIResource_RejectSchema(t *testing.T) { + policyIDOfPartiallyValidDPI := "d5d411825b2d8fa5a550f1e34153b88b375ed9c9af19ce6d2ba1769e237a45d0" + + test := testUtils.TestCase{ + + Description: "Test acp, add dpi schema, has both valid & invalid resources, but use invalid resource, schema rejected", + + Actions: []any{ + + testUtils.AddPolicy{ + + Creator: actor1Signature, + + Policy: ` + description: A Partially Valid Defra Policy Interface (DPI) + + actor: + name: actor + + resources: + usersValid: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + usersInvalid: + permissions: + read: + expr: reader - owner + write: + expr: reader + + relations: + owner: + types: + - actor + reader: + types: + - actor + `, + + ExpectedPolicyID: policyIDOfPartiallyValidDPI, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "usersInvalid" + ) { + name: String + age: Int + } + `, + policyIDOfPartiallyValidDPI, + ), + + ExpectedError: fmt.Sprintf( + "expr of required permission must start with required relation. Permission: %s, Relation: %s", + "read", + "owner", + ), + }, + + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": nil, // NOTE: No "Users" should exist. + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection/update/simple/with_doc_id_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go index 6f990f7e70..7badb3b66c 100644 --- a/tests/integration/collection/update/simple/with_doc_id_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -41,9 +42,12 @@ func TestUpdateWithDocID(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ - name: "Eric" - }`) + _, err := c.UpdateWithDocID( + ctx, + acpIdentity.NoIdentity, + doc.ID(), + `{name: "Eric"}`, + ) return err }, }, @@ -58,7 +62,7 @@ func TestUpdateWithDocID(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `"name: Eric"`) + _, err := c.UpdateWithDocID(ctx, acpIdentity.NoIdentity, doc.ID(), `"name: Eric"`) return err }, }, @@ -73,18 +77,23 @@ func TestUpdateWithDocID(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `[ - { - "name": "Eric" - }, { - "name": "Sam" - } - ]`) + _, err := c.UpdateWithDocID( + ctx, + acpIdentity.NoIdentity, + doc.ID(), + `[ + { + "name": "Eric" + }, { + "name": "Sam" + } + ]`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) if err != nil { return err } @@ -109,14 +118,17 @@ func 
TestUpdateWithDocID(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ - "name": "Eric" - }`) + _, err := c.UpdateWithDocID( + ctx, + acpIdentity.NoIdentity, + doc.ID(), + `{"name": "Eric"}`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_doc_ids_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go index a78fa2cc29..d1b38843a5 100644 --- a/tests/integration/collection/update/simple/with_doc_ids_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -54,9 +55,12 @@ func TestUpdateWithDocIDs(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ - name: "Eric" - }`) + _, err := c.UpdateWithDocIDs( + ctx, + acpIdentity.NoIdentity, + []client.DocID{doc1.ID(), doc2.ID()}, + `{name: "Eric"}`, + ) return err }, }, @@ -74,7 +78,12 @@ func TestUpdateWithDocIDs(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `"name: Eric"`) + _, err := c.UpdateWithDocIDs( + ctx, + acpIdentity.NoIdentity, + []client.DocID{doc1.ID(), doc2.ID()}, + `"name: Eric"`, + ) return err }, }, @@ -92,18 +101,23 @@ func TestUpdateWithDocIDs(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `[ - { - "name": "Eric" - }, { - "name": "Bob" - } - ]`) + _, err := c.UpdateWithDocIDs( + ctx, + acpIdentity.NoIdentity, + []client.DocID{doc1.ID(), doc2.ID()}, + `[ + { + "name": "Eric" + }, { + "name": "Bob" + } + ]`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc1.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc1.ID(), false) if err != nil { return err } @@ -115,7 +129,7 @@ func TestUpdateWithDocIDs(t *testing.T) { assert.Equal(t, "John", name) - d2, err := c.Get(ctx, doc2.ID(), false) + d2, err := c.Get(ctx, acpIdentity.NoIdentity, doc2.ID(), false) if err != nil { return err } @@ -143,14 +157,17 @@ func TestUpdateWithDocIDs(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ - "age": 40 - }`) + _, err := c.UpdateWithDocIDs( + ctx, + acpIdentity.NoIdentity, + []client.DocID{doc1.ID(), doc2.ID()}, + `{"age": 40}`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc1.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc1.ID(), false) if err != nil { return err } @@ -162,7 +179,7 @@ func TestUpdateWithDocIDs(t *testing.T) { assert.Equal(t, int64(40), name) - d2, err := c.Get(ctx, doc2.ID(), false) + d2, err := c.Get(ctx, acpIdentity.NoIdentity, doc2.ID(), false) if err != nil { return err } diff --git 
a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index 1dc10b8de8..54f5b918ac 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -29,9 +30,12 @@ func TestUpdateWithInvalidFilterType(t *testing.T) { func(c client.Collection) error { ctx := context.Background() // test with an invalid filter type - _, err := c.UpdateWithFilter(ctx, t, `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + t, + `{"name": "Eric"}`, + ) return err }, }, @@ -51,9 +55,12 @@ func TestUpdateWithEmptyFilter(t *testing.T) { func(c client.Collection) error { ctx := context.Background() // test with an empty filter - _, err := c.UpdateWithFilter(ctx, "", `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + "", + `{"name": "Eric"}`, + ) return err }, }, @@ -87,9 +94,12 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `{ - name: "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + filter, + `{name: "Eric"}`, + ) return err }, }, @@ -104,7 +114,12 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `"name: Eric"`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + filter, + `"name: Eric"`, + ) return err }, }, @@ -119,18 +134,23 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `[ - { - "name": "Eric" - }, { - "name": "Sam" - } - ]`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + filter, + `[ + { + "name": "Eric" + }, { + "name": "Sam" + } + ]`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) if err != nil { return err } @@ -155,14 +175,17 @@ func TestUpdateWithFilter(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithFilter(ctx, filter, `{ - "name": "Eric" - }`) + _, err := c.UpdateWithFilter( + ctx, + acpIdentity.NoIdentity, + filter, + `{"name": "Eric"}`, + ) if err != nil { return err } - d, err := c.Get(ctx, doc.ID(), false) + d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/utils.go b/tests/integration/collection/utils.go index b8bf1cf46b..eb053be594 100644 --- a/tests/integration/collection/utils.go +++ b/tests/integration/collection/utils.go @@ -17,6 +17,7 @@ import ( "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -90,7 +91,7 @@ func setupDatabase( if assertError(t, 
testCase.Description, err, testCase.ExpectedError) { return } - err = col.Save(ctx, doc) + err = col.Save(ctx, acpIdentity.NoIdentity, doc) if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } diff --git a/tests/integration/collection_description/updates/remove/policy_test.go b/tests/integration/collection_description/updates/remove/policy_test.go new file mode 100644 index 0000000000..8fdff5eb8e --- /dev/null +++ b/tests/integration/collection_description/updates/remove/policy_test.go @@ -0,0 +1,82 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package remove + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +func TestColDescrUpdateRemovePolicy_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.AddPolicy{ + + Creator: acpUtils.Actor1Signature, + + Policy: ` + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + + ExpectedPolicyID: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Users @policy( + id: "53980e762616fcffbe76307995895e862f87ef3f21d509325d1dc772a770b001", + resource: "users" + ) { + name: String + age: Int + } + `, + }, + + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "remove", "path": "/1/Policy" } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/policy_test.go b/tests/integration/collection_description/updates/replace/policy_test.go new file mode 100644 index 0000000000..f71b652c59 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/policy_test.go @@ -0,0 +1,83 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplacePolicy_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {} } + ] + `, + ExpectedError: "collection policy cannot be mutated. 
CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplacePolicyID_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {"ID": "dfe202ffb4f0fe9b46157c313213a383"} } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplacePolicyResource_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users {} + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Policy", "value": {"ResourceName": "mutatingResource"} } + ] + `, + ExpectedError: "collection policy cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/db.go b/tests/integration/db.go index b103f656b3..73d8818934 100644 --- a/tests/integration/db.go +++ b/tests/integration/db.go @@ -76,6 +76,7 @@ func NewBadgerMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, err if err != nil { return nil, err } + dbopts = append(dbopts, db.WithACPInMemory()) db, err := db.NewDB(ctx, rootstore, dbopts...) if err != nil { return nil, err @@ -84,6 +85,7 @@ func NewBadgerMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, err } func NewInMemoryDB(ctx context.Context, dbopts ...db.Option) (client.DB, error) { + dbopts = append(dbopts, db.WithACPInMemory()) db, err := db.NewDB(ctx, memory.NewDatastore(ctx), dbopts...) if err != nil { return nil, err @@ -110,14 +112,18 @@ func NewBadgerFileDB(ctx context.Context, t testing.TB, dbopts ...db.Option) (cl opts := &badgerds.Options{ Options: badger.DefaultOptions(dbPath), } + rootstore, err := badgerds.NewDatastore(dbPath, opts) if err != nil { return nil, "", err } + + dbopts = append(dbopts, db.WithACP(dbPath)) db, err := db.NewDB(ctx, rootstore, dbopts...) 
if err != nil { return nil, "", err } + return db, dbPath, err } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index ec5c174106..1e75687a4e 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -48,11 +49,11 @@ func TestEventsSimpleWithCreate(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), doc1) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) assert.Nil(t, err) }, func(c client.Collection) { - err = c.Save(context.Background(), doc2) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc2) assert.Nil(t, err) }, }, diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index c890792157..7ff1f838e7 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -27,6 +28,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { func(ctx context.Context, d client.DB) { r := d.ExecRequest( ctx, + acpIdentity.NoIdentity, `mutation { create_Users(input: {name: "John"}) { _docID @@ -42,6 +44,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { assert.Nil(t, err) r := d.WithTxn(txn).ExecRequest( ctx, + acpIdentity.NoIdentity, `mutation { create_Users(input: {name: "Shahzad"}) { _docID diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index b02b2505e1..00f5a5977f 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -37,11 +38,11 @@ func TestEventsSimpleWithDelete(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), doc1) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) assert.Nil(t, err) }, func(c client.Collection) { - wasDeleted, err := c.Delete(context.Background(), doc1.ID()) + wasDeleted, err := c.Delete(context.Background(), acpIdentity.NoIdentity, doc1.ID()) assert.Nil(t, err) assert.True(t, wasDeleted) }, diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 723421f91b..26c2e4363a 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/immutable" 
"github.com/stretchr/testify/assert" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -48,17 +49,17 @@ func TestEventsSimpleWithUpdate(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), doc1) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) assert.Nil(t, err) }, func(c client.Collection) { - err = c.Save(context.Background(), doc2) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc2) assert.Nil(t, err) }, func(c client.Collection) { // Update John doc1.Set("name", "Johnnnnn") - err = c.Save(context.Background(), doc1) + err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) assert.Nil(t, err) }, }, diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index d2bf418294..51299192e8 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -19,6 +19,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -152,7 +153,7 @@ func setupDatabase( doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) require.NoError(t, err) - err = col.Save(ctx, doc) + err = col.Save(ctx, acpIdentity.NoIdentity, doc) require.NoError(t, err) } } diff --git a/tests/integration/explain.go b/tests/integration/explain.go index eb44744e57..c087401588 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -20,6 +20,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -78,6 +79,9 @@ type ExplainRequest struct { // NodeID is the node ID (index) of the node in which to explain. NodeID immutable.Option[int] + // The identity of this request. + Identity string + // Has to be a valid explain request type (one of: 'simple', 'debug', 'execute', 'predict'). 
Request string @@ -127,7 +131,11 @@ func executeExplainRequest( } for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest(s.ctx, action.Request) + result := node.ExecRequest( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + action.Request, + ) assertExplainRequestResults(s, &result.GQL, action) } } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 18d4a2e13c..16da55ce78 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err "name": "Painted House", "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index 30545d6e7c..c693b05187 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin "name": "Painted House", "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index fdb8928964..39e132a6c6 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -184,7 +184,7 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ }`, invalidAuthorID, ), - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 6d38a9914d..0c05734204 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -368,7 +368,7 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t }`, invalidAuthorID, ), - ExpectedError: "no document for the given ID exists", + ExpectedError: "document not found or not authorized to access", }, }, } diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index f80701c64c..256db2f442 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -17,15 +17,15 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/net" - testutils "github.com/sourcenetwork/defradb/tests/integration" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) // TestP2PWithSingleDocumentUpdatePerNode tests document syncing between two nodes with a single update per node func 
TestP2PWithSingleDocumentUpdatePerNode(t *testing.T) { test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodePeers: map[int][]int{ 1: { @@ -75,8 +75,8 @@ func TestP2PWithSingleDocumentUpdatePerNode(t *testing.T) { func TestP2PWithMultipleDocumentUpdatesPerNode(t *testing.T) { test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodePeers: map[int][]int{ 1: { @@ -136,7 +136,7 @@ func TestP2PWithMultipleDocumentUpdatesPerNode(t *testing.T) { // TestP2FullPReplicator tests document syncing between a node and a replicator. func TestP2FullPReplicator(t *testing.T) { - colDefMap, err := testutils.ParseSDL(userCollectionGQLSchema) + colDefMap, err := testUtils.ParseSDL(userCollectionGQLSchema) require.NoError(t, err) doc, err := client.NewDocFromJSON([]byte(`{ "Name": "John", @@ -146,8 +146,8 @@ func TestP2FullPReplicator(t *testing.T) { test := P2PTestCase{ NodeConfig: [][]net.NodeOpt{ - testutils.RandomNetworkingConfig()(), - testutils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), + testUtils.RandomNetworkingConfig()(), }, NodeReplicators: map[int][]int{ 0: { diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 58857e5b94..2727690281 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -19,6 +19,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/immutable" + + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" coreDB "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" @@ -47,6 +50,11 @@ const ( type P2PTestCase struct { Query string + + // The identity for all requests. 
+ // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2366 - Improve in ACP <> P2P implementation + Identity string + // Configuration parameters for each peer NodeConfig [][]net.NodeOpt @@ -69,6 +77,7 @@ type P2PTestCase struct { func setupDefraNode( t *testing.T, + identity immutable.Option[string], opts []net.NodeOpt, peers []string, seeds []string, @@ -88,7 +97,7 @@ func setupDefraNode( // seed the database with a set of documents docIDs := []client.DocID{} for _, document := range seeds { - docID, err := seedDocument(ctx, db, document) + docID, err := seedDocument(ctx, identity, db, document) require.NoError(t, err) docIDs = append(docIDs, docID) } @@ -125,7 +134,12 @@ func seedSchema(ctx context.Context, db client.DB) error { return err } -func seedDocument(ctx context.Context, db client.DB, document string) (client.DocID, error) { +func seedDocument( + ctx context.Context, + identity immutable.Option[string], + db client.DB, + document string, +) (client.DocID, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return client.DocID{}, err @@ -136,7 +150,7 @@ func seedDocument(ctx context.Context, db client.DB, document string) (client.Do return client.DocID{}, err } - err = col.Save(ctx, doc) + err = col.Save(ctx, identity, doc) if err != nil { return client.DocID{}, err } @@ -144,22 +158,33 @@ func seedDocument(ctx context.Context, db client.DB, document string) (client.Do return doc.ID(), nil } -func saveDocument(ctx context.Context, db client.DB, document *client.Document) error { +func saveDocument( + ctx context.Context, + identity immutable.Option[string], + db client.DB, + document *client.Document, +) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err } - return col.Save(ctx, document) + return col.Save(ctx, identity, document) } -func updateDocument(ctx context.Context, db client.DB, docID client.DocID, update string) error { +func updateDocument( + ctx context.Context, + identity immutable.Option[string], + db client.DB, + docID client.DocID, + update string, +) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err } - doc, err := getDocument(ctx, db, docID) + doc, err := getDocument(ctx, identity, db, docID) if err != nil { return err } @@ -168,16 +193,21 @@ func updateDocument(ctx context.Context, db client.DB, docID client.DocID, updat return err } - return col.Save(ctx, doc) + return col.Save(ctx, identity, doc) } -func getDocument(ctx context.Context, db client.DB, docID client.DocID) (*client.Document, error) { +func getDocument( + ctx context.Context, + identity immutable.Option[string], + db client.DB, + docID client.DocID, +) (*client.Document, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return nil, err } - doc, err := col.Get(ctx, docID, false) + doc, err := col.Get(ctx, identity, docID, false) if err != nil { return nil, err } @@ -206,7 +236,13 @@ func executeTestCase(t *testing.T, test P2PTestCase) { ) } } - n, d, err := setupDefraNode(t, cfg, peerAddresses, test.SeedDocuments) + n, d, err := setupDefraNode( + t, + acpIdentity.NewIdentity(test.Identity), + cfg, + peerAddresses, + test.SeedDocuments, + ) require.NoError(t, err) if i == 0 { @@ -234,6 +270,8 @@ func executeTestCase(t *testing.T, test P2PTestCase) { } } + identity := acpIdentity.NewIdentity(test.Identity) + // update and sync peers for n, updateMap := range test.Updates { if n >= len(nodes) { @@ -244,7 +282,13 @@ func executeTestCase(t *testing.T, 
test P2PTestCase) { for d, updates := range updateMap { for _, update := range updates { log.InfoContext(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) - err := updateDocument(ctx, nodes[n].DB, docIDs[d], update) + err := updateDocument( + ctx, + identity, + nodes[n].DB, + docIDs[d], + update, + ) require.NoError(t, err) // wait for peers to sync @@ -272,7 +316,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for d, results := range resultsMap { for field, result := range results { - doc, err := getDocument(ctx, nodes[n2].DB, docIDs[d]) + doc, err := getDocument( + ctx, + identity, + nodes[n2].DB, + docIDs[d], + ) require.NoError(t, err) val, err := doc.Get(field) @@ -304,7 +353,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { if len(test.DocumentsToReplicate) > 0 { for n, reps := range test.NodeReplicators { for _, doc := range test.DocumentsToReplicate { - err := saveDocument(ctx, nodes[n].DB, doc) + err := saveDocument( + ctx, + identity, + nodes[n].DB, + doc, + ) require.NoError(t, err) } for _, rep := range reps { @@ -318,7 +372,12 @@ func executeTestCase(t *testing.T, test P2PTestCase) { d, err := client.NewDocIDFromString(docID) require.NoError(t, err) - doc, err := getDocument(ctx, nodes[rep].DB, d) + doc, err := getDocument( + ctx, + identity, + nodes[rep].DB, + d, + ) require.NoError(t, err) val, err := doc.Get(field) diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index 7c6a919373..0cace429ae 100644 --- a/tests/integration/p2p.go +++ b/tests/integration/p2p.go @@ -55,6 +55,12 @@ type ConfigureReplicator struct { // TargetNodeID is the node ID (index) of the node to which data should be replicated. TargetNodeID int + + // Any error expected from the action. Optional. + // + // String can be a partial, and the test will pass if an error is returned that + // contains this string. + ExpectedError string } // DeleteReplicator deletes a directional replicator relationship between two nodes. @@ -307,8 +313,12 @@ func configureReplicator( err := sourceNode.SetReplicator(s.ctx, client.Replicator{ Info: targetNode.PeerInfo(), }) - require.NoError(s.t, err) - setupReplicatorWaitSync(s, 0, cfg, sourceNode, targetNode) + + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, cfg.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, cfg.ExpectedError, expectedErrorRaised) + if err == nil { + setupReplicatorWaitSync(s, 0, cfg, sourceNode, targetNode) + } } func deleteReplicator( diff --git a/tests/integration/state.go b/tests/integration/state.go index 25a248413b..49030c82a6 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -112,5 +112,6 @@ func newState( collectionNames: collectionNames, documents: [][]*client.Document{}, indexes: [][][]client.IndexDescription{}, + isBench: false, } } diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 7cda289319..8d9315e4fa 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -205,6 +205,14 @@ type CreateDoc struct { // If a value is not provided the document will be created in all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided the created document(s) will be public. + // + // If an Identity is provided and the collection has a policy, then the + // created document(s) will be owned by this Identity. + Identity string + // The collection in which this document should be created. 
CollectionID int @@ -226,6 +234,14 @@ type DeleteDoc struct { // If a value is not provided the document will be created in all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided then can only delete public document(s). + // + // If an Identity is provided and the collection has a policy, then + // can also delete private document(s) that are owned by this Identity. + Identity string + // The collection in which this document should be deleted. CollectionID int @@ -251,6 +267,14 @@ type UpdateDoc struct { // If a value is not provided the update will be applied to all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided then can only update public document(s). + // + // If an Identity is provided and the collection has a policy, then + // can also update private document(s) that are owned by this Identity. + Identity string + // The collection in which this document exists. CollectionID int @@ -397,6 +421,14 @@ type Request struct { // in which case the expected results must all match across all nodes. NodeID immutable.Option[int] + // The identity of this request. Optional. + // + // If an Identity is not provided then can only operate over public document(s). + // + // If an Identity is provided and the collection has a policy, then can + // operate over private document(s) that are owned by this Identity. + Identity string + // Used to identify the transaction for this to run against. Optional. TransactionID immutable.Option[int] diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 930b429119..fe79b18106 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -27,6 +27,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" @@ -278,6 +279,9 @@ func performAction( case ConfigureMigration: configureMigration(s, action) + case AddPolicy: + addPolicyACP(s, action) + case CreateDoc: createDoc(s, action) @@ -834,7 +838,12 @@ func refreshDocuments( // The document may have been mutated by other actions, so to be sure we have the latest // version without having to worry about the individual update mechanics we fetch it. 
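Across these test updates the client API now threads an identity (an immutable.Option[string], built with acpIdentity.NewIdentity or passed as acpIdentity.NoIdentity) through every collection-level call. A minimal sketch of what a caller looks like after this change; col and doc are assumed to come from the usual setup, and "someActorIdentity" is a placeholder rather than a real actor:

// Sketch only: col is a client.Collection and doc a *client.Document
// obtained elsewhere; only the identity threading is from this change.
func saveAndFetch(ctx context.Context, col client.Collection, doc *client.Document) error {
	// Identity-less write: the document stays public.
	if err := col.Save(ctx, acpIdentity.NoIdentity, doc); err != nil {
		return err
	}

	// Identity-scoped read: with a policy on the collection, private
	// documents resolve only for their owner (or actors granted access).
	identity := acpIdentity.NewIdentity("someActorIdentity")
	_, err := col.Get(ctx, identity, doc.ID(), false)
	// On a mismatch this surfaces as "document not found or not authorized
	// to access", as asserted in the mutation tests above.
	return err
}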
- doc, err = collection.Get(s.ctx, doc.ID(), false) + doc, err = collection.Get( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + doc.ID(), + false, + ) if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway @@ -1182,7 +1191,11 @@ func createDocViaColSave( return nil, err } - return doc, collections[action.CollectionID].Save(s.ctx, doc) + return doc, collections[action.CollectionID].Save( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + doc, + ) } func createDocViaColCreate( @@ -1197,7 +1210,11 @@ func createDocViaColCreate( return nil, err } - return doc, collections[action.CollectionID].Create(s.ctx, doc) + return doc, collections[action.CollectionID].Create( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + doc, + ) } func createDocViaGQL( @@ -1223,7 +1240,12 @@ func createDocViaGQL( db := getStore(s, node, immutable.None[int](), action.ExpectedError) - result := db.ExecRequest(s.ctx, request) + identity := acpIdentity.NewIdentity(action.Identity) + result := db.ExecRequest( + s.ctx, + identity, + request, + ) if len(result.GQL.Errors) > 0 { return nil, result.GQL.Errors[0] } @@ -1237,7 +1259,7 @@ func createDocViaGQL( docID, err := client.NewDocIDFromString(docIDString) require.NoError(s.t, err) - doc, err := collection.Get(s.ctx, docID, false) + doc, err := collection.Get(s.ctx, identity, docID, false) require.NoError(s.t, err) return doc, nil @@ -1258,7 +1280,11 @@ func deleteDoc( actionNodes, nodeID, func() error { - _, err := collections[action.CollectionID].DeleteWithDocID(s.ctx, doc.ID()) + _, err := collections[action.CollectionID].DeleteWithDocID( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + doc.ID(), + ) return err }, ) @@ -1308,7 +1334,13 @@ func updateDocViaColSave( ) error { cachedDoc := s.documents[action.CollectionID][action.DocID] - doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true) + identity := acpIdentity.NewIdentity(action.Identity) + doc, err := collections[action.CollectionID].Get( + s.ctx, + identity, + cachedDoc.ID(), + true, + ) if err != nil { return err } @@ -1320,7 +1352,11 @@ func updateDocViaColSave( s.documents[action.CollectionID][action.DocID] = doc - return collections[action.CollectionID].Save(s.ctx, doc) + return collections[action.CollectionID].Save( + s.ctx, + identity, + doc, + ) } func updateDocViaColUpdate( @@ -1331,7 +1367,13 @@ func updateDocViaColUpdate( ) error { cachedDoc := s.documents[action.CollectionID][action.DocID] - doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true) + identity := acpIdentity.NewIdentity(action.Identity) + doc, err := collections[action.CollectionID].Get( + s.ctx, + identity, + cachedDoc.ID(), + true, + ) if err != nil { return err } @@ -1343,7 +1385,11 @@ func updateDocViaColUpdate( s.documents[action.CollectionID][action.DocID] = doc - return collections[action.CollectionID].Update(s.ctx, doc) + return collections[action.CollectionID].Update( + s.ctx, + identity, + doc, + ) } func updateDocViaGQL( @@ -1371,7 +1417,11 @@ func updateDocViaGQL( db := getStore(s, node, immutable.None[int](), action.ExpectedError) - result := db.ExecRequest(s.ctx, request) + result := db.ExecRequest( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + request, + ) if len(result.GQL.Errors) > 0 { return result.GQL.Errors[0] } @@ -1587,7 +1637,11 @@ func executeRequest( var expectedErrorRaised bool for nodeID, node := range getNodes(action.NodeID, s.nodes) { db := getStore(s, node, 
action.TransactionID, action.ExpectedError) - result := db.ExecRequest(s.ctx, action.Request) + result := db.ExecRequest( + s.ctx, + acpIdentity.NewIdentity(action.Identity), + action.Request, + ) anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( @@ -1619,7 +1673,11 @@ func executeSubscriptionRequest( subscriptionAssert := make(chan func()) for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest(s.ctx, action.Request) + result := node.ExecRequest( + s.ctx, + acpIdentity.NoIdentity, // No Identity for subscription request. + action.Request, + ) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return } @@ -1691,7 +1749,8 @@ func AssertError(t *testing.T, description string, err error, expectedError stri return false } else { if !strings.Contains(err.Error(), expectedError) { - assert.ErrorIs(t, err, errors.New(expectedError)) + // Must be require instead of assert, otherwise will show a fake "error not raised". + require.ErrorIs(t, err, errors.New(expectedError)) return false } return true @@ -1800,7 +1859,11 @@ func assertIntrospectionResults( action IntrospectionRequest, ) bool { for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest(s.ctx, action.Request) + result := node.ExecRequest( + s.ctx, + acpIdentity.NoIdentity, // No Identity for introspection requests. + action.Request, + ) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return true @@ -1831,7 +1894,11 @@ func assertClientIntrospectionResults( action ClientIntrospectionRequest, ) bool { for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest(s.ctx, action.Request) + result := node.ExecRequest( + s.ctx, + acpIdentity.NoIdentity, // No identity for client introspection requests. + action.Request, + ) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return true From 5f4b7252a28016a7b737280c87a8686a482b00d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 02:17:28 -0400 Subject: [PATCH 17/49] bot: Bump github.com/multiformats/go-multiaddr from 0.12.2 to 0.12.3 (#2480) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/multiformats/go-multiaddr](https://github.com/multiformats/go-multiaddr) from 0.12.2 to 0.12.3.
Release notes (sourced from github.com/multiformats/go-multiaddr's releases):

v0.12.3

What's Changed

Full Changelog: https://github.com/multiformats/go-multiaddr/compare/v0.12.2...v0.12.3

Commits
  • cece70d Merge pull request #240 from multiformats/v0123
  • c73b273 Merge pull request #238 from multiformats/marco/fix-comment-2
  • 31deec9 v0.12.3
  • 1a42417 Merge pull request #239 from multiformats/marco/decapsulate-by-component
  • e70034a Decapsulate by component rather than string
  • ff645ac Expand comment
  • b6caabb Merge pull request #237 from multiformats/uci/copy-templates
  • 60f3650 chore: add or force update .github/workflows/tagpush.yml
  • 65a6928 chore: add or force update .github/workflows/release-check.yml
  • 277e163 chore: add or force update .github/workflows/releaser.yml
  • Additional commits viewable in compare view
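The substantive change in v0.12.3 is e70034a, "Decapsulate by component rather than string". A minimal sketch of the method in question, assuming go-multiaddr's documented Multiaddr API (illustrative only, not part of this diff):

package main

import (
	"fmt"

	ma "github.com/multiformats/go-multiaddr"
)

func main() {
	addr := ma.StringCast("/ip4/1.2.3.4/tcp/80/ws")
	tcp := ma.StringCast("/tcp/80")

	// Decapsulate drops everything from the first occurrence of the given
	// address onward; as of v0.12.3 the match is made on whole components
	// rather than on a raw string prefix.
	fmt.Println(addr.Decapsulate(tcp)) // /ip4/1.2.3.4
}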

Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 35047db29f..c86801d0ac 100644 --- a/go.mod +++ b/go.mod @@ -27,7 +27,7 @@ require ( github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/libp2p/go-libp2p-pubsub v0.10.0 github.com/libp2p/go-libp2p-record v0.2.0 - github.com/multiformats/go-multiaddr v0.12.2 + github.com/multiformats/go-multiaddr v0.12.3 github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 diff --git a/go.sum b/go.sum index 925f4e9590..408f55d1b6 100644 --- a/go.sum +++ b/go.sum @@ -815,8 +815,8 @@ github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9 github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo= github.com/multiformats/go-multiaddr v0.2.0/go.mod h1:0nO36NvPpyV4QzvTLi/lafl2y95ncPj0vFwVF6k6wJ4= -github.com/multiformats/go-multiaddr v0.12.2 h1:9G9sTY/wCYajKa9lyfWPmpZAwe6oV+Wb1zcmMS1HG24= -github.com/multiformats/go-multiaddr v0.12.2/go.mod h1:GKyaTYjZRdcUhyOetrxTk9z0cW+jA/YrnqTOvKgi44M= +github.com/multiformats/go-multiaddr v0.12.3 h1:hVBXvPRcKG0w80VinQ23P5t7czWgg65BmIvQKjDydU8= +github.com/multiformats/go-multiaddr v0.12.3/go.mod h1:sBXrNzucqkFJhvKOiwwLyqamGa/P5EIXNPLovyhQCII= github.com/multiformats/go-multiaddr-dns v0.3.1 h1:QgQgR+LQVt3NPTjbrLLpsaT2ufAA2y0Mkk+QRVJbW3A= github.com/multiformats/go-multiaddr-dns v0.3.1/go.mod h1:G/245BRQ6FJGmryJCrOuTdB37AMA5AMOVuO6NY3JwTk= github.com/multiformats/go-multiaddr-fmt v0.1.0 h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E= From 1f60c6bedb9c889db5464f325bb70f28331d9c15 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 15:26:00 -0400 Subject: [PATCH 18/49] bot: Update dependencies (bulk dependabot PRs) 03-04-2024 (#2486) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2479 bot: Bump @types/react from 18.2.67 to 18.2.73 in /playground #2477 bot: Bump @typescript-eslint/eslint-plugin from 7.1.1 to 7.4.0 in /playground #2476 bot: Bump vite from 5.1.6 to 5.2.7 in /playground ⚠️ The following PRs were resolved manually: #2478 bot: Bump @types/react-dom from 18.2.21 to 18.2.23 in /playground #2452 bot: Bump typescript from 5.4.2 to 5.4.3 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 473 ++++++++++++++++++----------------- playground/package.json | 10 +- 2 files changed, 252 insertions(+), 231 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 1b888ea97e..4c9a5cf455 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.67", - "@types/react-dom": "^18.2.18", + "@types/react": "^18.2.73", + "@types/react-dom": "^18.2.23", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.1.0", + "@typescript-eslint/eslint-plugin": "^7.4.0", "@typescript-eslint/parser": "^7.3.1", 
"@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.3", - "vite": "^5.1.6" + "typescript": "^5.4.3", + "vite": "^5.2.7" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -112,9 +112,9 @@ "optional": true }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", - "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", "cpu": [ "ppc64" ], @@ -128,9 +128,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", - "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", "cpu": [ "arm" ], @@ -144,9 +144,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", - "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", "cpu": [ "arm64" ], @@ -160,9 +160,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", - "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", "cpu": [ "x64" ], @@ -176,9 +176,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", - "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", "cpu": [ "arm64" ], @@ -192,9 +192,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", - "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", "cpu": [ "x64" ], @@ -208,9 +208,9 @@ } }, 
"node_modules/@esbuild/freebsd-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", - "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", "cpu": [ "arm64" ], @@ -224,9 +224,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", - "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", "cpu": [ "x64" ], @@ -240,9 +240,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", - "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", "cpu": [ "arm" ], @@ -256,9 +256,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", - "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", "cpu": [ "arm64" ], @@ -272,9 +272,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", - "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", "cpu": [ "ia32" ], @@ -288,9 +288,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", - "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", "cpu": [ "loong64" ], @@ -304,9 +304,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", - "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", + "version": "0.20.2", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", "cpu": [ "mips64el" ], @@ -320,9 +320,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", - "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", "cpu": [ "ppc64" ], @@ -336,9 +336,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", - "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", "cpu": [ "riscv64" ], @@ -352,9 +352,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", - "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", "cpu": [ "s390x" ], @@ -368,9 +368,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", - "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", "cpu": [ "x64" ], @@ -384,9 +384,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", - "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", "cpu": [ "x64" ], @@ -400,9 +400,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", - "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", "cpu": [ "x64" ], @@ -416,9 +416,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.19.12", - 
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", - "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", "cpu": [ "x64" ], @@ -432,9 +432,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", - "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", "cpu": [ "arm64" ], @@ -448,9 +448,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", - "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", "cpu": [ "ia32" ], @@ -464,9 +464,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", - "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", "cpu": [ "x64" ], @@ -1455,9 +1455,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.12.1.tgz", - "integrity": "sha512-iU2Sya8hNn1LhsYyf0N+L4Gf9Qc+9eBTJJJsaOGUp+7x4n2M9dxTt8UvhJl3oeftSjblSlpCfvjA/IfP3g5VjQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.13.2.tgz", + "integrity": "sha512-3XFIDKWMFZrMnao1mJhnOT1h2g0169Os848NhhmGweEcfJ4rCi+3yMCOLG4zA61rbJdkcrM/DjVZm9Hg5p5w7g==", "cpu": [ "arm" ], @@ -1468,9 +1468,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.12.1.tgz", - "integrity": "sha512-wlzcWiH2Ir7rdMELxFE5vuM7D6TsOcJ2Yw0c3vaBR3VOsJFVTx9xvwnAvhgU5Ii8Gd6+I11qNHwndDscIm0HXg==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.13.2.tgz", + "integrity": "sha512-GdxxXbAuM7Y/YQM9/TwwP+L0omeE/lJAR1J+olu36c3LqqZEBdsIWeQ91KBe6nxwOnb06Xh7JS2U5ooWU5/LgQ==", "cpu": [ "arm64" ], @@ -1481,9 +1481,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.12.1.tgz", - "integrity": "sha512-YRXa1+aZIFN5BaImK+84B3uNK8C6+ynKLPgvn29X9s0LTVCByp54TB7tdSMHDR7GTV39bz1lOmlLDuedgTwwHg==", + "version": 
"4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.13.2.tgz", + "integrity": "sha512-mCMlpzlBgOTdaFs83I4XRr8wNPveJiJX1RLfv4hggyIVhfB5mJfN4P8Z6yKh+oE4Luz+qq1P3kVdWrCKcMYrrA==", "cpu": [ "arm64" ], @@ -1494,9 +1494,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.12.1.tgz", - "integrity": "sha512-opjWJ4MevxeA8FhlngQWPBOvVWYNPFkq6/25rGgG+KOy0r8clYwL1CFd+PGwRqqMFVQ4/Qd3sQu5t7ucP7C/Uw==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.13.2.tgz", + "integrity": "sha512-yUoEvnH0FBef/NbB1u6d3HNGyruAKnN74LrPAfDQL3O32e3k3OSfLrPgSJmgb3PJrBZWfPyt6m4ZhAFa2nZp2A==", "cpu": [ "x64" ], @@ -1507,9 +1507,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.12.1.tgz", - "integrity": "sha512-uBkwaI+gBUlIe+EfbNnY5xNyXuhZbDSx2nzzW8tRMjUmpScd6lCQYKY2V9BATHtv5Ef2OBq6SChEP8h+/cxifQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.13.2.tgz", + "integrity": "sha512-GYbLs5ErswU/Xs7aGXqzc3RrdEjKdmoCrgzhJWyFL0r5fL3qd1NPcDKDowDnmcoSiGJeU68/Vy+OMUluRxPiLQ==", "cpu": [ "arm" ], @@ -1520,9 +1520,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.12.1.tgz", - "integrity": "sha512-0bK9aG1kIg0Su7OcFTlexkVeNZ5IzEsnz1ept87a0TUgZ6HplSgkJAnFpEVRW7GRcikT4GlPV0pbtVedOaXHQQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.13.2.tgz", + "integrity": "sha512-L1+D8/wqGnKQIlh4Zre9i4R4b4noxzH5DDciyahX4oOz62CphY7WDWqJoQ66zNR4oScLNOqQJfNSIAe/6TPUmQ==", "cpu": [ "arm64" ], @@ -1533,9 +1533,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.12.1.tgz", - "integrity": "sha512-qB6AFRXuP8bdkBI4D7UPUbE7OQf7u5OL+R94JE42Z2Qjmyj74FtDdLGeriRyBDhm4rQSvqAGCGC01b8Fu2LthQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.13.2.tgz", + "integrity": "sha512-tK5eoKFkXdz6vjfkSTCupUzCo40xueTOiOO6PeEIadlNBkadH1wNOH8ILCPIl8by/Gmb5AGAeQOFeLev7iZDOA==", "cpu": [ "arm64" ], @@ -1545,10 +1545,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.13.2.tgz", + "integrity": "sha512-zvXvAUGGEYi6tYhcDmb9wlOckVbuD+7z3mzInCSTACJ4DQrdSLPNUeDIcAQW39M3q6PDquqLWu7pnO39uSMRzQ==", + "cpu": [ + "ppc64le" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.12.1.tgz", - "integrity": "sha512-sHig3LaGlpNgDj5o8uPEoGs98RII8HpNIqFtAI8/pYABO8i0nb1QzT0JDoXF/pxzqO+FkxvwkHZo9k0NJYDedg==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.13.2.tgz", + "integrity": 
"sha512-C3GSKvMtdudHCN5HdmAMSRYR2kkhgdOfye4w0xzyii7lebVr4riCgmM6lRiSCnJn2w1Xz7ZZzHKuLrjx5620kw==", "cpu": [ "riscv64" ], @@ -1558,10 +1571,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.13.2.tgz", + "integrity": "sha512-l4U0KDFwzD36j7HdfJ5/TveEQ1fUTjFFQP5qIt9gBqBgu1G8/kCaq5Ok05kd5TG9F8Lltf3MoYsUMw3rNlJ0Yg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.12.1.tgz", - "integrity": "sha512-nD3YcUv6jBJbBNFvSbp0IV66+ba/1teuBcu+fBBPZ33sidxitc6ErhON3JNavaH8HlswhWMC3s5rgZpM4MtPqQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.13.2.tgz", + "integrity": "sha512-xXMLUAMzrtsvh3cZ448vbXqlUa7ZL8z0MwHp63K2IIID2+DeP5iWIT6g1SN7hg1VxPzqx0xZdiDM9l4n9LRU1A==", "cpu": [ "x64" ], @@ -1572,9 +1598,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.12.1.tgz", - "integrity": "sha512-7/XVZqgBby2qp/cO0TQ8uJK+9xnSdJ9ct6gSDdEr4MfABrjTyrW6Bau7HQ73a2a5tPB7hno49A0y1jhWGDN9OQ==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.13.2.tgz", + "integrity": "sha512-M/JYAWickafUijWPai4ehrjzVPKRCyDb1SLuO+ZyPfoXgeCEAlgPkNXewFZx0zcnoIe3ay4UjXIMdXQXOZXWqA==", "cpu": [ "x64" ], @@ -1585,9 +1611,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.12.1.tgz", - "integrity": "sha512-CYc64bnICG42UPL7TrhIwsJW4QcKkIt9gGlj21gq3VV0LL6XNb1yAdHVp1pIi9gkts9gGcT3OfUYHjGP7ETAiw==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.13.2.tgz", + "integrity": "sha512-2YWwoVg9KRkIKaXSh0mz3NmfurpmYoBBTAXA9qt7VXk0Xy12PoOP40EFuau+ajgALbbhi4uTj3tSG3tVseCjuA==", "cpu": [ "arm64" ], @@ -1598,9 +1624,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.12.1.tgz", - "integrity": "sha512-LN+vnlZ9g0qlHGlS920GR4zFCqAwbv2lULrR29yGaWP9u7wF5L7GqWu9Ah6/kFZPXPUkpdZwd//TNR+9XC9hvA==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.13.2.tgz", + "integrity": "sha512-2FSsE9aQ6OWD20E498NYKEQLneShWes0NGMPQwxWOdws35qQXH+FplabOSP5zEe1pVjurSDOGEVCE2agFwSEsw==", "cpu": [ "ia32" ], @@ -1611,9 +1637,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.12.1.tgz", - "integrity": "sha512-n+vkrSyphvmU0qkQ6QBNXCGr2mKjhP08mPRM/Xp5Ck2FV4NrHU+y6axzDeixUrCBHVUS51TZhjqrKBBsHLKb2Q==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.13.2.tgz", + "integrity": "sha512-7h7J2nokcdPePdKykd8wtc8QqqkqxIrUz7MHj6aNr8waBRU//NLDVnNjQnqQO6fqtjrtCdftpbTuOKAyrAQETQ==", "cpu": [ "x64" ], @@ -2357,31 +2383,24 @@ } }, "node_modules/@types/react": { - "version": "18.2.67", - 
"resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.67.tgz", - "integrity": "sha512-vkIE2vTIMHQ/xL0rgmuoECBCkZFZeHr49HeWSc24AptMbNRo7pwSBvj73rlJJs9fGKj0koS+V7kQB1jHS0uCgw==", + "version": "18.2.73", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.73.tgz", + "integrity": "sha512-XcGdod0Jjv84HOC7N5ziY3x+qL0AfmubvKOZ9hJjJ2yd5EE+KYjWhdOjt387e9HPheHkdggF9atTifMRtyAaRA==", "devOptional": true, "dependencies": { "@types/prop-types": "*", - "@types/scheduler": "*", "csstype": "^3.0.2" } }, "node_modules/@types/react-dom": { - "version": "18.2.21", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.21.tgz", - "integrity": "sha512-gnvBA/21SA4xxqNXEwNiVcP0xSGHh/gi1VhWv9Bl46a0ItbTT5nFY+G9VSQpaG/8N/qdJpJ+vftQ4zflTtnjLw==", + "version": "18.2.23", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.23.tgz", + "integrity": "sha512-ZQ71wgGOTmDYpnav2knkjr3qXdAFu0vsk8Ci5w3pGAIdj7/kKAyn+VsQDhXsmzzzepAiI9leWMmubXz690AI/A==", "devOptional": true, "dependencies": { "@types/react": "*" } }, - "node_modules/@types/scheduler": { - "version": "0.16.8", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", - "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==", - "devOptional": true - }, "node_modules/@types/semver": { "version": "7.5.8", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", @@ -2416,16 +2435,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.1.1.tgz", - "integrity": "sha512-zioDz623d0RHNhvx0eesUmGfIjzrk18nSBC8xewepKXbBvN/7c1qImV7Hg8TI1URTxKax7/zxfxj3Uph8Chcuw==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.4.0.tgz", + "integrity": "sha512-yHMQ/oFaM7HZdVrVm/M2WHaNPgyuJH4WelkSVEWSSsir34kxW2kDJCxlXRhhGWEsMN0WAW/vLpKfKVcm8k+MPw==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/type-utils": "7.1.1", - "@typescript-eslint/utils": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@typescript-eslint/scope-manager": "7.4.0", + "@typescript-eslint/type-utils": "7.4.0", + "@typescript-eslint/utils": "7.4.0", + "@typescript-eslint/visitor-keys": "7.4.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2434,7 +2453,7 @@ "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2554,16 +2573,16 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.1.1.tgz", - "integrity": "sha512-cirZpA8bJMRb4WZ+rO6+mnOJrGFDd38WoXCEI57+CYBqta8Yc8aJym2i7vyqLL1vVYljgw0X27axkUXz32T8TA==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.4.0.tgz", + "integrity": "sha512-68VqENG5HK27ypafqLVs8qO+RkNc7TezCduYrx8YJpXq2QGZ30vmNZGJJJC48+MVn4G2dCV8m5ZTVnzRexTVtw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1" + "@typescript-eslint/types": "7.4.0", + "@typescript-eslint/visitor-keys": 
"7.4.0" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2571,18 +2590,18 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.1.1.tgz", - "integrity": "sha512-5r4RKze6XHEEhlZnJtR3GYeCh1IueUHdbrukV2KSlLXaTjuSfeVF8mZUVPLovidCuZfbVjfhi4c0DNSa/Rdg5g==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.4.0.tgz", + "integrity": "sha512-247ETeHgr9WTRMqHbbQdzwzhuyaJ8dPTuyuUEMANqzMRB1rj/9qFIuIXK7l0FX9i9FXbHeBQl/4uz6mYuCE7Aw==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.1.1", - "@typescript-eslint/utils": "7.1.1", + "@typescript-eslint/typescript-estree": "7.4.0", + "@typescript-eslint/utils": "7.4.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2598,12 +2617,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.1.1.tgz", - "integrity": "sha512-KhewzrlRMrgeKm1U9bh2z5aoL4s7K3tK5DwHDn8MHv0yQfWFz/0ZR6trrIHHa5CsF83j/GgHqzdbzCXJ3crx0Q==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.4.0.tgz", + "integrity": "sha512-mjQopsbffzJskos5B4HmbsadSJQWaRK0UxqQ7GuNA9Ga4bEKeiO6b2DnB6cM6bpc8lemaPseh0H9B/wyg+J7rw==", "dev": true, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2611,13 +2630,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.1.1.tgz", - "integrity": "sha512-9ZOncVSfr+sMXVxxca2OJOPagRwT0u/UHikM2Rd6L/aB+kL/QAuTnsv6MeXtjzCJYb8PzrXarypSGIPx3Jemxw==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.4.0.tgz", + "integrity": "sha512-A99j5AYoME/UBQ1ucEbbMEmGkN7SE0BvZFreSnTd1luq7yulcHdyGamZKizU7canpGDWGJ+Q6ZA9SyQobipePg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/visitor-keys": "7.1.1", + "@typescript-eslint/types": "7.4.0", + "@typescript-eslint/visitor-keys": "7.4.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2626,7 +2645,7 @@ "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2639,21 +2658,21 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.1.1.tgz", - "integrity": "sha512-thOXM89xA03xAE0lW7alstvnyoBUbBX38YtY+zAUcpRPcq9EIhXPuJ0YTv948MbzmKh6e1AUszn5cBFK49Umqg==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.4.0.tgz", + "integrity": "sha512-NQt9QLM4Tt8qrlBVY9lkMYzfYtNz8/6qwZg8pI3cMGlPnj6mOpRxxAm7BMJN9K0AiY+1BwJ5lVC650YJqYOuNg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "7.1.1", - "@typescript-eslint/types": "7.1.1", - "@typescript-eslint/typescript-estree": "7.1.1", + "@typescript-eslint/scope-manager": "7.4.0", + 
"@typescript-eslint/types": "7.4.0", + "@typescript-eslint/typescript-estree": "7.4.0", "semver": "^7.5.4" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -2664,16 +2683,16 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.1.1.tgz", - "integrity": "sha512-yTdHDQxY7cSoCcAtiBzVzxleJhkGB9NncSIyMYe2+OGON1ZsP9zOPws/Pqgopa65jvknOjlk/w7ulPlZ78PiLQ==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.4.0.tgz", + "integrity": "sha512-0zkC7YM0iX5Y41homUUeW1CHtZR01K3ybjM1l6QczoMuay0XKtrb93kv95AxUGwdjGr64nNqnOCwmEl616N8CA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.1.1", + "@typescript-eslint/types": "7.4.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -3288,9 +3307,9 @@ } }, "node_modules/esbuild": { - "version": "0.19.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", - "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", "dev": true, "hasInstallScript": true, "bin": { @@ -3300,29 +3319,29 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.19.12", - "@esbuild/android-arm": "0.19.12", - "@esbuild/android-arm64": "0.19.12", - "@esbuild/android-x64": "0.19.12", - "@esbuild/darwin-arm64": "0.19.12", - "@esbuild/darwin-x64": "0.19.12", - "@esbuild/freebsd-arm64": "0.19.12", - "@esbuild/freebsd-x64": "0.19.12", - "@esbuild/linux-arm": "0.19.12", - "@esbuild/linux-arm64": "0.19.12", - "@esbuild/linux-ia32": "0.19.12", - "@esbuild/linux-loong64": "0.19.12", - "@esbuild/linux-mips64el": "0.19.12", - "@esbuild/linux-ppc64": "0.19.12", - "@esbuild/linux-riscv64": "0.19.12", - "@esbuild/linux-s390x": "0.19.12", - "@esbuild/linux-x64": "0.19.12", - "@esbuild/netbsd-x64": "0.19.12", - "@esbuild/openbsd-x64": "0.19.12", - "@esbuild/sunos-x64": "0.19.12", - "@esbuild/win32-arm64": "0.19.12", - "@esbuild/win32-ia32": "0.19.12", - "@esbuild/win32-x64": "0.19.12" + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" } }, "node_modules/escape-string-regexp": { @@ -4930,9 +4949,9 @@ } }, "node_modules/postcss": { - "version": "8.4.35", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz", - "integrity": 
"sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==", + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", "dev": true, "funding": [ { @@ -4951,7 +4970,7 @@ "dependencies": { "nanoid": "^3.3.7", "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "source-map-js": "^1.2.0" }, "engines": { "node": "^10 || ^12 || >=14" @@ -5488,9 +5507,9 @@ } }, "node_modules/rollup": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.12.1.tgz", - "integrity": "sha512-ggqQKvx/PsB0FaWXhIvVkSWh7a/PCLQAsMjBc+nA2M8Rv2/HG0X6zvixAB7KyZBRtifBUhy5k8voQX/mRnABPg==", + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.13.2.tgz", + "integrity": "sha512-MIlLgsdMprDBXC+4hsPgzWUasLO9CE4zOkj/u6j+Z6j5A4zRY+CtiXAdJyPtgCsc42g658Aeh1DlrdVEJhsL2g==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -5503,19 +5522,21 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.12.1", - "@rollup/rollup-android-arm64": "4.12.1", - "@rollup/rollup-darwin-arm64": "4.12.1", - "@rollup/rollup-darwin-x64": "4.12.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.12.1", - "@rollup/rollup-linux-arm64-gnu": "4.12.1", - "@rollup/rollup-linux-arm64-musl": "4.12.1", - "@rollup/rollup-linux-riscv64-gnu": "4.12.1", - "@rollup/rollup-linux-x64-gnu": "4.12.1", - "@rollup/rollup-linux-x64-musl": "4.12.1", - "@rollup/rollup-win32-arm64-msvc": "4.12.1", - "@rollup/rollup-win32-ia32-msvc": "4.12.1", - "@rollup/rollup-win32-x64-msvc": "4.12.1", + "@rollup/rollup-android-arm-eabi": "4.13.2", + "@rollup/rollup-android-arm64": "4.13.2", + "@rollup/rollup-darwin-arm64": "4.13.2", + "@rollup/rollup-darwin-x64": "4.13.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.13.2", + "@rollup/rollup-linux-arm64-gnu": "4.13.2", + "@rollup/rollup-linux-arm64-musl": "4.13.2", + "@rollup/rollup-linux-powerpc64le-gnu": "4.13.2", + "@rollup/rollup-linux-riscv64-gnu": "4.13.2", + "@rollup/rollup-linux-s390x-gnu": "4.13.2", + "@rollup/rollup-linux-x64-gnu": "4.13.2", + "@rollup/rollup-linux-x64-musl": "4.13.2", + "@rollup/rollup-win32-arm64-msvc": "4.13.2", + "@rollup/rollup-win32-ia32-msvc": "4.13.2", + "@rollup/rollup-win32-x64-msvc": "4.13.2", "fsevents": "~2.3.2" } }, @@ -5742,9 +5763,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "dev": true, "engines": { "node": ">=0.10.0" @@ -6077,9 +6098,9 @@ } }, "node_modules/typescript": { - "version": "5.4.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.2.tgz", - "integrity": "sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", + "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -6181,14 +6202,14 @@ "optional": true }, 
"node_modules/vite": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.1.6.tgz", - "integrity": "sha512-yYIAZs9nVfRJ/AiOLCA91zzhjsHUgMjB+EigzFb6W2XTLO8JixBCKCjvhKZaye+NKYHCrkv3Oh50dH9EdLU2RA==", + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.7.tgz", + "integrity": "sha512-k14PWOKLI6pMaSzAuGtT+Cf0YmIx12z9YGon39onaJNy8DLBfBJrzg9FQEmkAM5lpHBZs9wksWAsyF/HkpEwJA==", "dev": true, "dependencies": { - "esbuild": "^0.19.3", - "postcss": "^8.4.35", - "rollup": "^4.2.0" + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" }, "bin": { "vite": "bin/vite.js" diff --git a/playground/package.json b/playground/package.json index fb8253684e..74f2fb03f8 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.67", - "@types/react-dom": "^18.2.18", + "@types/react": "^18.2.73", + "@types/react-dom": "^18.2.23", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.1.0", + "@typescript-eslint/eslint-plugin": "^7.4.0", "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.3", - "vite": "^5.1.6" + "typescript": "^5.4.3", + "vite": "^5.2.7" } } From 0fae20715fcce9f2d3e31849ce01974f8ef535db Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 19:58:21 -0400 Subject: [PATCH 19/49] bot: Update dependencies (bulk dependabot PRs) 2024-04-03 (#2492) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2491 bot: Bump vite from 5.2.7 to 5.2.8 in /playground #2490 bot: Bump @typescript-eslint/eslint-plugin from 7.4.0 to 7.5.0 in /playground #2489 bot: Bump @types/react from 18.2.73 to 18.2.74 in /playground #2485 bot: Bump github.com/btcsuite/btcd from 0.22.1 to 0.23.2 #2484 bot: Bump github.com/cosmos/cosmos-sdk from 0.50.4 to 0.50.5 #2483 bot: Bump github.com/go-jose/go-jose/v3 from 3.0.1-0.20221117193127-916db76e8214 to 3.0.3 #2481 bot: Bump github.com/quic-go/quic-go from 0.40.1 to 0.42.0 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Fred Carle --- go.mod | 43 +++++++------- go.sum | 110 ++++++++++++++++++++--------------- playground/package-lock.json | 88 ++++++++++++++-------------- playground/package.json | 6 +- 4 files changed, 131 insertions(+), 116 deletions(-) diff --git a/go.mod b/go.mod index c86801d0ac..ff973e637a 100644 --- a/go.mod +++ b/go.mod @@ -22,7 +22,7 @@ require ( github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c - github.com/libp2p/go-libp2p v0.32.2 + github.com/libp2p/go-libp2p v0.33.2 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/libp2p/go-libp2p-pubsub v0.10.0 @@ -47,9 +47,9 @@ require ( go.opentelemetry.io/otel/metric v1.24.0 go.opentelemetry.io/otel/sdk/metric v1.24.0 go.uber.org/zap v1.27.0 - golang.org/x/exp v0.0.0-20240213143201-ec583247a57a + golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 google.golang.org/grpc v1.62.1 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.33.0 ) require ( @@ -60,9 +60,9 @@ require ( 
cosmossdk.io/depinject v1.0.0-alpha.4 // indirect cosmossdk.io/errors v1.0.1 // indirect cosmossdk.io/log v1.3.1 // indirect - cosmossdk.io/math v1.2.0 // indirect + cosmossdk.io/math v1.3.0 // indirect cosmossdk.io/store v1.0.2 // indirect - cosmossdk.io/x/tx v0.13.0 // indirect + cosmossdk.io/x/tx v0.13.1 // indirect filippo.io/edwards25519 v1.0.0 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/keyring v1.2.1 // indirect @@ -91,9 +91,9 @@ require ( github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/cosmos/btcutil v1.0.5 // indirect - github.com/cosmos/cosmos-db v1.0.0 // indirect + github.com/cosmos/cosmos-db v1.0.2 // indirect github.com/cosmos/cosmos-proto v1.0.0-beta.4 // indirect - github.com/cosmos/cosmos-sdk v0.50.4 // indirect + github.com/cosmos/cosmos-sdk v0.50.5 // indirect github.com/cosmos/go-bip39 v1.0.0 // indirect github.com/cosmos/gogogateway v1.2.0 // indirect github.com/cosmos/gorocksdb v1.2.0 // indirect @@ -117,11 +117,11 @@ require ( github.com/emicklei/dot v1.6.1 // indirect github.com/fatih/color v1.15.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/flynn/noise v1.0.1 // indirect + github.com/flynn/noise v1.1.0 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/getsentry/sentry-go v0.27.0 // indirect - github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 // indirect + github.com/go-jose/go-jose/v3 v3.0.3 // indirect github.com/go-kit/kit v0.12.0 // indirect github.com/go-kit/log v0.2.1 // indirect github.com/go-logfmt/logfmt v0.6.0 // indirect @@ -136,18 +136,18 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.2.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/golang/protobuf v1.5.3 // indirect + github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/btree v1.1.2 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/google/go-cmp v0.6.0 // indirect github.com/google/gopacket v1.1.19 // indirect github.com/google/orderedcode v0.0.1 // indirect - github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 // indirect + github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5 // indirect github.com/google/uuid v1.6.0 // indirect github.com/gorilla/handlers v1.5.2 // indirect github.com/gorilla/mux v1.8.1 // indirect - github.com/gorilla/websocket v1.5.0 // indirect + github.com/gorilla/websocket v1.5.1 // indirect github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 // indirect @@ -190,8 +190,8 @@ require ( github.com/jmhodges/levigo v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/kilic/bls12-381 v0.1.1-0.20210503002446-7b7597926c69 // indirect - github.com/klauspost/compress v1.17.6 // indirect - github.com/klauspost/cpuid/v2 v2.2.6 // indirect + github.com/klauspost/compress v1.17.7 // indirect + github.com/klauspost/cpuid/v2 v2.2.7 // indirect github.com/koron/go-ssdp v0.0.4 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect @@ -213,7 +213,7 @@ require ( github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect github.com/mattn/go-colorable v0.1.13 // indirect 
github.com/mattn/go-isatty v0.0.20 // indirect - github.com/miekg/dns v1.1.57 // indirect + github.com/miekg/dns v1.1.58 // indirect github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect github.com/minio/highwayhash v1.0.2 // indirect @@ -232,8 +232,8 @@ require ( github.com/multiformats/go-varint v0.0.7 // indirect github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect github.com/oklog/run v1.1.0 // indirect - github.com/onsi/ginkgo/v2 v2.13.2 // indirect - github.com/opencontainers/runtime-spec v1.1.0 // indirect + github.com/onsi/ginkgo/v2 v2.15.0 // indirect + github.com/opencontainers/runtime-spec v1.2.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect github.com/pelletier/go-toml/v2 v2.1.0 // indirect @@ -249,8 +249,7 @@ require ( github.com/prometheus/common v0.47.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect github.com/quic-go/qpack v0.4.0 // indirect - github.com/quic-go/qtls-go1-20 v0.4.1 // indirect - github.com/quic-go/quic-go v0.40.1 // indirect + github.com/quic-go/quic-go v0.42.0 // indirect github.com/quic-go/webtransport-go v0.6.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect @@ -299,9 +298,9 @@ require ( golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.18.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect - google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect + google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/v3 v3.5.1 // indirect diff --git a/go.sum b/go.sum index 408f55d1b6..a597ef0dff 100644 --- a/go.sum +++ b/go.sum @@ -16,12 +16,12 @@ cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0= cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U= cosmossdk.io/log v1.3.1 h1:UZx8nWIkfbbNEWusZqzAx3ZGvu54TZacWib3EzUYmGI= cosmossdk.io/log v1.3.1/go.mod h1:2/dIomt8mKdk6vl3OWJcPk2be3pGOS8OQaLUM/3/tCM= -cosmossdk.io/math v1.2.0 h1:8gudhTkkD3NxOP2YyyJIYYmt6dQ55ZfJkDOaxXpy7Ig= -cosmossdk.io/math v1.2.0/go.mod h1:l2Gnda87F0su8a/7FEKJfFdJrM0JZRXQaohlgJeyQh0= +cosmossdk.io/math v1.3.0 h1:RC+jryuKeytIiictDslBP9i1fhkVm6ZDmZEoNP316zE= +cosmossdk.io/math v1.3.0/go.mod h1:vnRTxewy+M7BtXBNFybkuhSH4WfedVAAnERHgVFhp3k= cosmossdk.io/store v1.0.2 h1:lSg5BTvJBHUDwswNNyeh4K/CbqiHER73VU4nDNb8uk0= cosmossdk.io/store v1.0.2/go.mod h1:EFtENTqVTuWwitGW1VwaBct+yDagk7oG/axBMPH+FXs= -cosmossdk.io/x/tx v0.13.0 h1:8lzyOh3zONPpZv2uTcUmsv0WTXy6T1/aCVDCqShmpzU= -cosmossdk.io/x/tx v0.13.0/go.mod h1:CpNQtmoqbXa33/DVxWQNx5Dcnbkv2xGUhL7tYQ5wUsY= +cosmossdk.io/x/tx v0.13.1 h1:Mg+EMp67Pz+NukbJqYxuo8uRp7N/a9uR+oVS9pONtj8= +cosmossdk.io/x/tx v0.13.1/go.mod h1:CBCU6fsRVz23QGFIQBb1DNX2DztJCf3jWyEkHY2nJQ0= dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU= dmitri.shuralyov.com/gpu/mtl 
v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU= @@ -188,12 +188,12 @@ github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk= github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis= -github.com/cosmos/cosmos-db v1.0.0 h1:EVcQZ+qYag7W6uorBKFPvX6gRjw6Uq2hIh4hCWjuQ0E= -github.com/cosmos/cosmos-db v1.0.0/go.mod h1:iBvi1TtqaedwLdcrZVYRSSCb6eSy61NLj4UNmdIgs0U= +github.com/cosmos/cosmos-db v1.0.2 h1:hwMjozuY1OlJs/uh6vddqnk9j7VamLv+0DBlbEXbAKs= +github.com/cosmos/cosmos-db v1.0.2/go.mod h1:Z8IXcFJ9PqKK6BIsVOB3QXtkKoqUOp1vRvPT39kOXEA= github.com/cosmos/cosmos-proto v1.0.0-beta.4 h1:aEL7tU/rLOmxZQ9z4i7mzxcLbSCY48OdY7lIWTLG7oU= github.com/cosmos/cosmos-proto v1.0.0-beta.4/go.mod h1:oeB+FyVzG3XrQJbJng0EnV8Vljfk9XvTIpGILNU/9Co= -github.com/cosmos/cosmos-sdk v0.50.4 h1:hQT5/+Z1XXNF7skaPq0i247Ts2dzzqg/j2WO/BPHSto= -github.com/cosmos/cosmos-sdk v0.50.4/go.mod h1:UbShFs6P8Ly29xxJvkNGaNaL/UGj5a686NRtb1Cqra0= +github.com/cosmos/cosmos-sdk v0.50.5 h1:MOEi+DKYgW67YaPgB+Pf+nHbD3V9S/ayitRKJYLfGIA= +github.com/cosmos/cosmos-sdk v0.50.5/go.mod h1:oV/k6GJgXV9QPoM2fsYDPPsyPBgQbdotv532O6Mz1OQ= github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY= github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw= github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= @@ -291,8 +291,8 @@ github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBD github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/flynn/noise v1.0.1 h1:vPp/jdQLXC6ppsXSj/pM3W1BIJ5FEHE2TulSJBpb43Y= -github.com/flynn/noise v1.0.1/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag= +github.com/flynn/noise v1.1.0 h1:KjPQoQCEFdZDiP03phOvGi11+SVVhBG2wOWAorLsstg= +github.com/flynn/noise v1.1.0/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk= @@ -327,8 +327,8 @@ github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214 h1:w5li6eMV6NCHh1YVbKRM/gMCVtZ2w7mnwq78eNnHXQQ= -github.com/go-jose/go-jose/v3 v3.0.1-0.20221117193127-916db76e8214/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= 
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= @@ -427,8 +427,8 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= @@ -464,8 +464,8 @@ github.com/google/orderedcode v0.0.1 h1:UzfcAexk9Vhv8+9pNOgRu41f16lHq725vPwnSeiG github.com/google/orderedcode v0.0.1/go.mod h1:iVyU4/qPKHY5h/wSd6rZZCDcLJNxiWO6dvsYES2Sb20= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 h1:dHLYa5D8/Ta0aLR2XcPsrkpAgGeFs6thhMcQK0oQ0n8= -github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= +github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5 h1:E/LAvt58di64hlYjx7AsNS6C/ysHWYo+2qPCZKTQhRo= +github.com/google/pprof v0.0.0-20240207164012-fb44976bdcd5/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= @@ -487,8 +487,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= +github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= @@ -673,10 +673,10 @@ github.com/kkdai/bstream 
v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6 github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= -github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= -github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= -github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= +github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/koron/go-ssdp v0.0.4 h1:1IDwrghSKYM7yLf7XCzbByg2sJ/JcNOZRXS2jczTwz0= @@ -704,8 +704,8 @@ github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38y github.com/libp2p/go-cidranger v1.1.0/go.mod h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic= github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM= github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= -github.com/libp2p/go-libp2p v0.32.2 h1:s8GYN4YJzgUoyeYNPdW7JZeZ5Ee31iNaIBfGYMAY4FQ= -github.com/libp2p/go-libp2p v0.32.2/go.mod h1:E0LKe+diV/ZVJVnOJby8VC5xzHF0660osg71skcxJvk= +github.com/libp2p/go-libp2p v0.33.2 h1:vCdwnFxoGOXMKmaGHlDSnL4bM3fQeW8pgIa9DECnb40= +github.com/libp2p/go-libp2p v0.33.2/go.mod h1:zTeppLuCvUIkT118pFVzA8xzP/p2dJYOMApCkFh0Yww= github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94= github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= @@ -768,8 +768,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM= -github.com/miekg/dns v1.1.57/go.mod h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk= +github.com/miekg/dns v1.1.58 h1:ca2Hdkz+cDg/7eNF6V56jjzuZ4aCAE+DbVkILdQWG/4= +github.com/miekg/dns v1.1.58/go.mod h1:Ypv+3b/KadlvW9vJfXOTf300O4UqaHFzFCuHz+rPkBY= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c h1:bzE/A84HN25pxAuk9Eej1Kz9OUelF97nAc82bDquQI8= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c/go.mod h1:0SQS9kMwD2VsyFEB++InYyBJroV/FRmBgcydeSUcJms= github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b h1:z78hV3sbSMAUoyUMM0I83AUIT6Hu17AWfgjzIbtrYFc= @@ -865,16 +865,16 @@ github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vv github.com/onsi/ginkgo v1.16.5 
h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/ginkgo/v2 v2.13.2 h1:Bi2gGVkfn6gQcjNjZJVO8Gf0FHzMPf2phUei9tejVMs= -github.com/onsi/ginkgo/v2 v2.13.2/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= +github.com/onsi/ginkgo/v2 v2.15.0 h1:79HwNRBAZHOEwrczrgSOPy+eFTTlIGELKy5as+ClttY= +github.com/onsi/ginkgo/v2 v2.15.0/go.mod h1:HlxMHtYF57y6Dpf+mc5529KKmSq9h2FpCF+/ZkwUxKM= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg= -github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= +github.com/onsi/gomega v1.30.0 h1:hvMK7xYz4D3HapigLTeGdId/NcfQx1VHMJc60ew99+8= +github.com/onsi/gomega v1.30.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= @@ -883,8 +883,8 @@ github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVn github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= github.com/opencontainers/runc v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= -github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk= +github.com/opencontainers/runtime-spec v1.2.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= @@ -971,10 +971,8 @@ github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A= -github.com/quic-go/qtls-go1-20 v0.4.1 h1:D33340mCNDAIKBqXuAvexTNMUByrYmFYVfKfDN5nfFs= -github.com/quic-go/qtls-go1-20 v0.4.1/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k= -github.com/quic-go/quic-go v0.40.1 h1:X3AGzUNFs0jVuO3esAGnTfvdgvL4fq655WaOi1snv1Q= -github.com/quic-go/quic-go v0.40.1/go.mod h1:PeN7kuVJ4xZbxSv/4OX6S1USOX8MJvydwpTx31vx60c= 
+github.com/quic-go/quic-go v0.42.0 h1:uSfdap0eveIl8KXnipv9K7nlwZ5IqLlYOpJ58u5utpM= +github.com/quic-go/quic-go v0.42.0/go.mod h1:132kz4kL3F9vxhW3CtQJLDVwcFe5wdWeJXXijhsO57M= github.com/quic-go/webtransport-go v0.6.0 h1:CvNsKqc4W2HljHJnoT+rMmbRJybShZ0YPFDD3NxaZLY= github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1abRaosM2yGOyiikEc= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= @@ -1170,6 +1168,7 @@ github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1: github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U= github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw= @@ -1238,7 +1237,6 @@ golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -1246,13 +1244,14 @@ golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= -golang.org/x/exp v0.0.0-20240213143201-ec583247a57a h1:HinSgX1tJRX3KsL//Gxynpw5CTOAIPhgL4W8PNiIpVE= -golang.org/x/exp v0.0.0-20240213143201-ec583247a57a/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod 
h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1269,6 +1268,8 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1304,6 +1305,9 @@ golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1320,6 +1324,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1379,15 +1385,19 @@ golang.org/x/sys v0.0.0-20220315194320-039c03cc5b86/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1397,11 +1407,15 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1425,6 +1439,8 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1456,12 +1472,12 @@ google.golang.org/genproto 
v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ= -google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80/go.mod h1:cc8bqMqtv9gMOr0zHg2Vzff5ULhhL2IXP4sbcn32Dro= -google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 h1:Lj5rbfG876hIAYFjqiJnPHfhXbv+nzTWfm04Fg/XSVU= -google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y= +google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s= +google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 h1:x9PwdEgd11LgK+orcck69WVRo7DezSO4VUMPI4xpc8A= +google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014/go.mod h1:rbHMSEDyoYX62nRVLOCc4Qt1HbsdytAYoVwgjiOhF3I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c h1:NUsgEN92SQQqzfA+YtqYNqYmB3DMMYLlIwUZAQFVFbo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c/go.mod h1:H4O17MA/PE9BsGx3w+a+W2VOLLD1Qf7oJneAoU6WktY= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -1499,8 +1515,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/playground/package-lock.json b/playground/package-lock.json index 4c9a5cf455..03f61a4776 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.73", + "@types/react": "^18.2.74", "@types/react-dom": "^18.2.23", 
"@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.4.0", + "@typescript-eslint/eslint-plugin": "^7.5.0", "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.4.3", - "vite": "^5.2.7" + "vite": "^5.2.8" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -2383,9 +2383,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.73", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.73.tgz", - "integrity": "sha512-XcGdod0Jjv84HOC7N5ziY3x+qL0AfmubvKOZ9hJjJ2yd5EE+KYjWhdOjt387e9HPheHkdggF9atTifMRtyAaRA==", + "version": "18.2.74", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.74.tgz", + "integrity": "sha512-9AEqNZZyBx8OdZpxzQlaFEVCSFUM2YXJH46yPOiOpm078k6ZLOCcuAzGum/zK8YBwY+dbahVNbHrbgrAwIRlqw==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2435,16 +2435,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.4.0.tgz", - "integrity": "sha512-yHMQ/oFaM7HZdVrVm/M2WHaNPgyuJH4WelkSVEWSSsir34kxW2kDJCxlXRhhGWEsMN0WAW/vLpKfKVcm8k+MPw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.5.0.tgz", + "integrity": "sha512-HpqNTH8Du34nLxbKgVMGljZMG0rJd2O9ecvr2QLYp+7512ty1j42KnsFwspPXg1Vh8an9YImf6CokUBltisZFQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "7.4.0", - "@typescript-eslint/type-utils": "7.4.0", - "@typescript-eslint/utils": "7.4.0", - "@typescript-eslint/visitor-keys": "7.4.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/type-utils": "7.5.0", + "@typescript-eslint/utils": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2573,13 +2573,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.4.0.tgz", - "integrity": "sha512-68VqENG5HK27ypafqLVs8qO+RkNc7TezCduYrx8YJpXq2QGZ30vmNZGJJJC48+MVn4G2dCV8m5ZTVnzRexTVtw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.5.0.tgz", + "integrity": "sha512-Z1r7uJY0MDeUlql9XJ6kRVgk/sP11sr3HKXn268HZyqL7i4cEfrdFuSSY/0tUqT37l5zT0tJOsuDP16kio85iA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.4.0", - "@typescript-eslint/visitor-keys": "7.4.0" + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2590,13 +2590,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.4.0.tgz", - "integrity": "sha512-247ETeHgr9WTRMqHbbQdzwzhuyaJ8dPTuyuUEMANqzMRB1rj/9qFIuIXK7l0FX9i9FXbHeBQl/4uz6mYuCE7Aw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.5.0.tgz", + "integrity": "sha512-A021Rj33+G8mx2Dqh0nMO9GyjjIBK3MqgVgZ2qlKf6CJy51wY/lkkFqq3TqqnH34XyAHUkq27IjlUkWlQRpLHw==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": 
"7.4.0", - "@typescript-eslint/utils": "7.4.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "@typescript-eslint/utils": "7.5.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2617,9 +2617,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.4.0.tgz", - "integrity": "sha512-mjQopsbffzJskos5B4HmbsadSJQWaRK0UxqQ7GuNA9Ga4bEKeiO6b2DnB6cM6bpc8lemaPseh0H9B/wyg+J7rw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.5.0.tgz", + "integrity": "sha512-tv5B4IHeAdhR7uS4+bf8Ov3k793VEVHd45viRRkehIUZxm0WF82VPiLgHzA/Xl4TGPg1ZD49vfxBKFPecD5/mg==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2630,13 +2630,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.4.0.tgz", - "integrity": "sha512-A99j5AYoME/UBQ1ucEbbMEmGkN7SE0BvZFreSnTd1luq7yulcHdyGamZKizU7canpGDWGJ+Q6ZA9SyQobipePg==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.5.0.tgz", + "integrity": "sha512-YklQQfe0Rv2PZEueLTUffiQGKQneiIEKKnfIqPIOxgM9lKSZFCjT5Ad4VqRKj/U4+kQE3fa8YQpskViL7WjdPQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.4.0", - "@typescript-eslint/visitor-keys": "7.4.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2658,17 +2658,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.4.0.tgz", - "integrity": "sha512-NQt9QLM4Tt8qrlBVY9lkMYzfYtNz8/6qwZg8pI3cMGlPnj6mOpRxxAm7BMJN9K0AiY+1BwJ5lVC650YJqYOuNg==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.5.0.tgz", + "integrity": "sha512-3vZl9u0R+/FLQcpy2EHyRGNqAS/ofJ3Ji8aebilfJe+fobK8+LbIFmrHciLVDxjDoONmufDcnVSF38KwMEOjzw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "7.4.0", - "@typescript-eslint/types": "7.4.0", - "@typescript-eslint/typescript-estree": "7.4.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/typescript-estree": "7.5.0", "semver": "^7.5.4" }, "engines": { @@ -2683,12 +2683,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.4.0.tgz", - "integrity": "sha512-0zkC7YM0iX5Y41homUUeW1CHtZR01K3ybjM1l6QczoMuay0XKtrb93kv95AxUGwdjGr64nNqnOCwmEl616N8CA==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.5.0.tgz", + "integrity": "sha512-mcuHM/QircmA6O7fy6nn2w/3ditQkj+SgtOc8DW3uQ10Yfj42amm2i+6F2K4YAOPNNTmE6iM1ynM6lrSwdendA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.4.0", + "@typescript-eslint/types": "7.5.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { @@ -6202,9 +6202,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.2.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.7.tgz", - "integrity": "sha512-k14PWOKLI6pMaSzAuGtT+Cf0YmIx12z9YGon39onaJNy8DLBfBJrzg9FQEmkAM5lpHBZs9wksWAsyF/HkpEwJA==", + "version": 
"5.2.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.8.tgz", + "integrity": "sha512-OyZR+c1CE8yeHw5V5t59aXsUPPVTHMDjEZz8MgguLL/Q7NblxhZUlTu9xSPqlsUO/y+X7dlU05jdhvyycD55DA==", "dev": true, "dependencies": { "esbuild": "^0.20.1", diff --git a/playground/package.json b/playground/package.json index 74f2fb03f8..d2cc478f8a 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.12.0" }, "devDependencies": { - "@types/react": "^18.2.73", + "@types/react": "^18.2.74", "@types/react-dom": "^18.2.23", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.4.0", + "@typescript-eslint/eslint-plugin": "^7.5.0", "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.4.3", - "vite": "^5.2.7" + "vite": "^5.2.8" } } From 9938518063c39be292ad217c7ba00ff5f1c26167 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 5 Apr 2024 11:27:26 -0700 Subject: [PATCH 20/49] test: Add flag to skip network tests (#2495) ## Relevant issue(s) Resolves #2494 ## Description This PR adds a test flag set via an environment variable to skip any tests that involve network actions. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? `make test` Specify the platform(s) on which this was tested: - MacOS --- tests/integration/utils2.go | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index fe79b18106..18c97e76d1 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -15,6 +15,7 @@ import ( "fmt" "os" "reflect" + "strconv" "strings" "testing" "time" @@ -40,7 +41,10 @@ import ( "github.com/sourcenetwork/defradb/tests/predefined" ) -const mutationTypeEnvName = "DEFRA_MUTATION_TYPE" +const ( + mutationTypeEnvName = "DEFRA_MUTATION_TYPE" + skipNetworkTestsEnvName = "DEFRA_SKIP_NETWORK_TESTS" +) // The MutationType that tests will run using. // @@ -72,6 +76,8 @@ const ( var ( log = corelog.NewLogger("tests.integration") mutationType MutationType + // skipNetworkTests will skip any tests that involve network actions + skipNetworkTests = false ) const ( @@ -95,6 +101,9 @@ func init() { // mutation type. mutationType = CollectionSaveMutationType } + if value, ok := os.LookupEnv(skipNetworkTestsEnvName); ok { + skipNetworkTests, _ = strconv.ParseBool(value) + } } // AssertPanic asserts that the code inside the specified PanicTestFunc panics. 
@@ -131,6 +140,7 @@ func ExecuteTestCase( collectionNames := getCollectionNames(testCase) changeDetector.PreTestChecks(t, collectionNames) skipIfMutationTypeUnsupported(t, testCase.SupportedMutationTypes) + skipIfNetworkTest(t, testCase.Actions) var clients []ClientType if httpClient { @@ -182,6 +192,7 @@ func executeTestCase( corelog.Any("client", clientType), corelog.Any("mutationType", mutationType), corelog.String("databaseDir", databaseDir), + corelog.Bool("skipNetworkTests", skipNetworkTests), corelog.Bool("changeDetector.Enabled", changeDetector.Enabled), corelog.Bool("changeDetector.SetupOnly", changeDetector.SetupOnly), corelog.String("changeDetector.SourceBranch", changeDetector.SourceBranch), @@ -2001,6 +2012,21 @@ func skipIfMutationTypeUnsupported(t *testing.T, supportedMutationTypes immutabl } } +// skipIfNetworkTest skips the current test if the given actions +// contain network actions and skipNetworkTests is true. +func skipIfNetworkTest(t *testing.T, actions []any) { + hasNetworkAction := false + for _, act := range actions { + switch act.(type) { + case ConfigureNode: + hasNetworkAction = true + } + } + if skipNetworkTests && hasNetworkAction { + t.Skip("test involves network actions") + } +} + func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { parser, err := graphql.NewParser() if err != nil { From 4811ba9386c3bb557dc88337a207fa9ed6649f54 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Mon, 8 Apr 2024 12:56:10 -0400 Subject: [PATCH 21/49] feat: Add P Counter CRDT (#2482) ## Relevant issue(s) Resolves #2116 ## Description This PR adds a Positive Counter CRDT type. It converts the PNCounter implementation to a Counter implementation that supports both PN and P counters. --- client/ctype.go | 7 +- core/crdt/{pncounter.go => counter.go} | 102 ++++--- core/crdt/errors.go | 6 + db/base/collection_keys.go | 2 +- merkle/crdt/{pncounter.go => counter.go} | 31 +- merkle/crdt/merklecrdt.go | 14 +- request/graphql/schema/types/types.go | 16 +- .../mutation/create/crdt/pcounter_test.go | 57 ++++ .../mutation/update/crdt/pcounter_test.go | 265 ++++++++++++++++++ .../mutation/update/crdt/pncounter_test.go | 5 +- .../state/simple/peer/crdt/pcounter_test.go | 124 ++++++++ .../peer_replicator/crdt/pcounter_test.go | 160 +++++++++++ .../simple/replicator/crdt/pcounter_test.go | 71 +++++ .../query/simple/with_cid_doc_id_test.go | 94 +++++++ tests/integration/schema/crdt_type_test.go | 95 +++++++ .../updates/add/field/crdt/pcounter_test.go | 73 +++++ .../updates/add/field/crdt/pncounter_test.go | 4 +- 17 files changed, 1053 insertions(+), 73 deletions(-) rename core/crdt/{pncounter.go => counter.go} (56%) rename merkle/crdt/{pncounter.go => counter.go} (55%) create mode 100644 tests/integration/mutation/create/crdt/pcounter_test.go create mode 100644 tests/integration/mutation/update/crdt/pcounter_test.go create mode 100644 tests/integration/net/state/simple/peer/crdt/pcounter_test.go create mode 100644 tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go create mode 100644 tests/integration/net/state/simple/replicator/crdt/pcounter_test.go create mode 100644 tests/integration/schema/updates/add/field/crdt/pcounter_test.go diff --git a/client/ctype.go b/client/ctype.go index c5f792df86..f9d961ec3e 100644 --- a/client/ctype.go +++ b/client/ctype.go @@ -23,12 +23,13 @@ const ( OBJECT COMPOSITE PN_COUNTER + P_COUNTER ) // IsSupportedFieldCType returns true if the type is supported as a document field type. 
func (t CType) IsSupportedFieldCType() bool { switch t { - case NONE_CRDT, LWW_REGISTER, PN_COUNTER: + case NONE_CRDT, LWW_REGISTER, PN_COUNTER, P_COUNTER: return true default: return false @@ -38,7 +39,7 @@ func (t CType) IsSupportedFieldCType() bool { // IsCompatibleWith returns true if the CRDT is compatible with the field kind func (t CType) IsCompatibleWith(kind FieldKind) bool { switch t { - case PN_COUNTER: + case PN_COUNTER, P_COUNTER: if kind == FieldKind_NILLABLE_INT || kind == FieldKind_NILLABLE_FLOAT { return true } @@ -61,6 +62,8 @@ func (t CType) String() string { return "composite" case PN_COUNTER: return "pncounter" + case P_COUNTER: + return "pcounter" default: return "unknown" } diff --git a/core/crdt/pncounter.go b/core/crdt/counter.go similarity index 56% rename from core/crdt/pncounter.go rename to core/crdt/counter.go index 7d8b02c1a4..01ca3cf0da 100644 --- a/core/crdt/pncounter.go +++ b/core/crdt/counter.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -33,18 +33,18 @@ import ( var ( // ensure types implements core interfaces - _ core.ReplicatedData = (*PNCounter[float64])(nil) - _ core.ReplicatedData = (*PNCounter[int64])(nil) - _ core.Delta = (*PNCounterDelta[float64])(nil) - _ core.Delta = (*PNCounterDelta[int64])(nil) + _ core.ReplicatedData = (*Counter[float64])(nil) + _ core.ReplicatedData = (*Counter[int64])(nil) + _ core.Delta = (*CounterDelta[float64])(nil) + _ core.Delta = (*CounterDelta[int64])(nil) ) type Incrementable interface { constraints.Integer | constraints.Float } -// PNCounterDelta is a single delta operation for an PNCounter -type PNCounterDelta[T Incrementable] struct { +// CounterDelta is a single delta operation for a Counter +type CounterDelta[T Incrementable] struct { DocID []byte FieldName string Priority uint64 @@ -59,17 +59,17 @@ type PNCounterDelta[T Incrementable] struct { } // GetPriority gets the current priority for this delta. -func (delta *PNCounterDelta[T]) GetPriority() uint64 { +func (delta *CounterDelta[T]) GetPriority() uint64 { return delta.Priority } // SetPriority will set the priority for this delta. -func (delta *PNCounterDelta[T]) SetPriority(prio uint64) { +func (delta *CounterDelta[T]) SetPriority(prio uint64) { delta.Priority = prio } // Marshal encodes the delta using CBOR. -func (delta *PNCounterDelta[T]) Marshal() ([]byte, error) { +func (delta *CounterDelta[T]) Marshal() ([]byte, error) { h := &codec.CborHandle{} buf := bytes.NewBuffer(nil) enc := codec.NewEncoder(buf, h) @@ -81,44 +81,50 @@ func (delta *PNCounterDelta[T]) Marshal() ([]byte, error) { } // Unmarshal decodes the delta from CBOR. -func (delta *PNCounterDelta[T]) Unmarshal(b []byte) error { +func (delta *CounterDelta[T]) Unmarshal(b []byte) error { h := &codec.CborHandle{} dec := codec.NewDecoderBytes(b, h) return dec.Decode(delta) } -// PNCounter, is a simple CRDT type that allows increment/decrement +// Counter, is a simple CRDT type that allows increment/decrement // of an Int and Float data types that ensures convergence. -type PNCounter[T Incrementable] struct { +type Counter[T Incrementable] struct { baseCRDT + AllowDecrement bool } -// NewPNCounter returns a new instance of the PNCounter with the given ID. -func NewPNCounter[T Incrementable]( +// NewCounter returns a new instance of the Counter with the given ID. 
+func NewCounter[T Incrementable]( store datastore.DSReaderWriter, schemaVersionKey core.CollectionSchemaVersionKey, key core.DataStoreKey, fieldName string, -) PNCounter[T] { - return PNCounter[T]{newBaseCRDT(store, key, schemaVersionKey, fieldName)} + allowDecrement bool, +) Counter[T] { + return Counter[T]{newBaseCRDT(store, key, schemaVersionKey, fieldName), allowDecrement} } -// Value gets the current register value -func (reg PNCounter[T]) Value(ctx context.Context) ([]byte, error) { - valueK := reg.key.WithValueFlag() - buf, err := reg.store.Get(ctx, valueK.ToDS()) +// Value gets the current counter value +func (c Counter[T]) Value(ctx context.Context) ([]byte, error) { + valueK := c.key.WithValueFlag() + buf, err := c.store.Get(ctx, valueK.ToDS()) if err != nil { return nil, err } return buf, nil } -// Set generates a new delta with the supplied value -func (reg PNCounter[T]) Increment(ctx context.Context, value T) (*PNCounterDelta[T], error) { +// Increment generates a new delta with the supplied value. +// +// WARNING: Incrementing an integer and causing it to overflow the int64 max value +// will cause the value to roll over to the int64 min value. Incrementing a float and +// causing it to overflow the float64 max value will act like a no-op. +func (c Counter[T]) Increment(ctx context.Context, value T) (*CounterDelta[T], error) { // To ensure that the dag block is unique, we add a random number to the delta. // This is done only on update (if the doc doesn't already exist) to ensure that the // initial dag block of a document can be reproducible. - exists, err := reg.store.Has(ctx, reg.key.ToPrimaryDataStoreKey().ToDS()) + exists, err := c.store.Has(ctx, c.key.ToPrimaryDataStoreKey().ToDS()) if err != nil { return nil, err } @@ -131,29 +137,32 @@ func (reg PNCounter[T]) Increment(ctx context.Context, value T) (*PNCounterDelta nonce = r.Int64() } - return &PNCounterDelta[T]{ - DocID: []byte(reg.key.DocID), - FieldName: reg.fieldName, + return &CounterDelta[T]{ + DocID: []byte(c.key.DocID), + FieldName: c.fieldName, Data: value, - SchemaVersionID: reg.schemaVersionKey.SchemaVersionId, + SchemaVersionID: c.schemaVersionKey.SchemaVersionId, Nonce: nonce, }, nil } // Merge implements ReplicatedData interface. -// It merges two PNCounterRegisty by adding the values together. -func (reg PNCounter[T]) Merge(ctx context.Context, delta core.Delta) error { - d, ok := delta.(*PNCounterDelta[T]) +// It merges two Counters by adding the values together.
+func (c Counter[T]) Merge(ctx context.Context, delta core.Delta) error { + d, ok := delta.(*CounterDelta[T]) if !ok { return ErrMismatchedMergeType } - return reg.incrementValue(ctx, d.Data, d.GetPriority()) + return c.incrementValue(ctx, d.Data, d.GetPriority()) } -func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority uint64) error { - key := reg.key.WithValueFlag() - marker, err := reg.store.Get(ctx, reg.key.ToPrimaryDataStoreKey().ToDS()) +func (c Counter[T]) incrementValue(ctx context.Context, value T, priority uint64) error { + if !c.AllowDecrement && value < 0 { + return NewErrNegativeValue(value) + } + key := c.key.WithValueFlag() + marker, err := c.store.Get(ctx, c.key.ToPrimaryDataStoreKey().ToDS()) if err != nil && !errors.Is(err, ds.ErrNotFound) { return err } @@ -161,7 +170,7 @@ func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority ui key = key.WithDeletedFlag() } - curValue, err := reg.getCurrentValue(ctx, key) + curValue, err := c.getCurrentValue(ctx, key) if err != nil { return err } @@ -172,16 +181,16 @@ func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority ui return err } - err = reg.store.Put(ctx, key.ToDS(), b) + err = c.store.Put(ctx, key.ToDS(), b) if err != nil { return NewErrFailedToStoreValue(err) } - return reg.setPriority(ctx, reg.key, priority) + return c.setPriority(ctx, c.key, priority) } -func (reg PNCounter[T]) getCurrentValue(ctx context.Context, key core.DataStoreKey) (T, error) { - curValue, err := reg.store.Get(ctx, key.ToDS()) +func (c Counter[T]) getCurrentValue(ctx context.Context, key core.DataStoreKey) (T, error) { + curValue, err := c.store.Get(ctx, key.ToDS()) if err != nil { if errors.Is(err, ds.ErrNotFound) { return 0, nil @@ -192,14 +201,14 @@ func (reg PNCounter[T]) getCurrentValue(ctx context.Context, key core.DataStoreK return getNumericFromBytes[T](curValue) } -// DeltaDecode is a typed helper to extract a PNCounterDelta from a ipld.Node -func (reg PNCounter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) { +// DeltaDecode is a typed helper to extract a CounterDelta from a ipld.Node +func (c Counter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) { pbNode, ok := node.(*dag.ProtoNode) if !ok { return nil, client.NewErrUnexpectedType[*dag.ProtoNode]("ipld.Node", node) } - delta := &PNCounterDelta[T]{} + delta := &CounterDelta[T]{} err := delta.Unmarshal(pbNode.Data()) if err != nil { return nil, err @@ -208,6 +217,13 @@ func (reg PNCounter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) { return delta, nil } +func (c Counter[T]) CType() client.CType { + if c.AllowDecrement { + return client.PN_COUNTER + } + return client.P_COUNTER +} + func getNumericFromBytes[T Incrementable](b []byte) (T, error) { var val T err := cbor.Unmarshal(b, &val) diff --git a/core/crdt/errors.go b/core/crdt/errors.go index e1148d1044..75af579850 100644 --- a/core/crdt/errors.go +++ b/core/crdt/errors.go @@ -17,6 +17,7 @@ import ( const ( errFailedToGetPriority string = "failed to get priority" errFailedToStoreValue string = "failed to store value" + errNegativeValue string = "value cannot be negative" ) // Errors returnable from this package. 
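To make the new decrement guard concrete, here is a self-contained sketch (not part of this diff) of the behaviour the two counter flavours are expected to exhibit; the generic constraint mirrors `Incrementable` from counter.go:

```go
package main

import (
	"errors"
	"fmt"

	"golang.org/x/exp/constraints"
)

type incrementable interface {
	constraints.Integer | constraints.Float
}

// apply mirrors incrementValue's guard: a P Counter (allowDecrement=false)
// rejects negative deltas, while a PN Counter accepts them.
func apply[T incrementable](current, delta T, allowDecrement bool) (T, error) {
	if !allowDecrement && delta < 0 {
		return current, errors.New("value cannot be negative")
	}
	return current + delta, nil
}

func main() {
	if _, err := apply[int64](10, -5, false); err != nil {
		fmt.Println("pcounter:", err) // pcounter: value cannot be negative
	}
	v, _ := apply[int64](10, -5, true)
	fmt.Println("pncounter:", v) // pncounter: 5
}
```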
@@ -26,6 +27,7 @@ const ( var ( ErrFailedToGetPriority = errors.New(errFailedToGetPriority) ErrFailedToStoreValue = errors.New(errFailedToStoreValue) + ErrNegativeValue = errors.New(errNegativeValue) ErrEncodingPriority = errors.New("error encoding priority") ErrDecodingPriority = errors.New("error decoding priority") // ErrMismatchedMergeType - Tying to merge two ReplicatedData of different types @@ -41,3 +43,7 @@ func NewErrFailedToGetPriority(inner error) error { func NewErrFailedToStoreValue(inner error) error { return errors.Wrap(errFailedToStoreValue, inner) } + +func NewErrNegativeValue[T Incrementable](value T) error { + return errors.New(errNegativeValue, errors.NewKV("Value", value)) +} diff --git a/db/base/collection_keys.go b/db/base/collection_keys.go index 1277b96a81..98584454ab 100644 --- a/db/base/collection_keys.go +++ b/db/base/collection_keys.go @@ -47,7 +47,7 @@ func MakePrimaryIndexKeyForCRDT( WithInstanceInfo(key). WithFieldId(core.COMPOSITE_NAMESPACE), nil - case client.LWW_REGISTER, client.PN_COUNTER: + case client.LWW_REGISTER, client.PN_COUNTER, client.P_COUNTER: field, ok := c.GetFieldByName(fieldName) if !ok { return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName) diff --git a/merkle/crdt/pncounter.go b/merkle/crdt/counter.go similarity index 55% rename from merkle/crdt/pncounter.go rename to merkle/crdt/counter.go index 74b7adb156..6ca016cea6 100644 --- a/merkle/crdt/pncounter.go +++ b/merkle/crdt/counter.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -21,40 +21,41 @@ import ( "github.com/sourcenetwork/defradb/merkle/clock" ) -// MerklePNCounter is a MerkleCRDT implementation of the PNCounter using MerkleClocks. -type MerklePNCounter[T crdt.Incrementable] struct { +// MerkleCounter is a MerkleCRDT implementation of the Counter using MerkleClocks. +type MerkleCounter[T crdt.Incrementable] struct { *baseMerkleCRDT - reg crdt.PNCounter[T] + reg crdt.Counter[T] } -// NewMerklePNCounter creates a new instance (or loaded from DB) of a MerkleCRDT -// backed by a PNCounter CRDT. -func NewMerklePNCounter[T crdt.Incrementable]( +// NewMerkleCounter creates a new instance (or loaded from DB) of a MerkleCRDT +// backed by a Counter CRDT. +func NewMerkleCounter[T crdt.Incrementable]( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, key core.DataStoreKey, fieldName string, -) *MerklePNCounter[T] { - register := crdt.NewPNCounter[T](store.Datastore(), schemaVersionKey, key, fieldName) + allowDecrement bool, +) *MerkleCounter[T] { + register := crdt.NewCounter[T](store.Datastore(), schemaVersionKey, key, fieldName, allowDecrement) clk := clock.NewMerkleClock(store.Headstore(), store.DAGstore(), key.ToHeadStoreKey(), register) base := &baseMerkleCRDT{clock: clk, crdt: register} - return &MerklePNCounter[T]{ + return &MerkleCounter[T]{ baseMerkleCRDT: base, reg: register, } } -// Save the value of the PN Counter to the DAG. -func (mPNC *MerklePNCounter[T]) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { +// Save the value of the Counter to the DAG. 
+func (mc *MerkleCounter[T]) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { value, ok := data.(*client.FieldValue) if !ok { - return nil, 0, NewErrUnexpectedValueType(client.PN_COUNTER, &client.FieldValue{}, data) + return nil, 0, NewErrUnexpectedValueType(mc.reg.CType(), &client.FieldValue{}, data) } - delta, err := mPNC.reg.Increment(ctx, value.Value().(T)) + delta, err := mc.reg.Increment(ctx, value.Value().(T)) if err != nil { return nil, 0, err } - nd, err := mPNC.clock.AddDAGNode(ctx, delta) + nd, err := mc.clock.AddDAGNode(ctx, delta) return nd, delta.GetPriority(), err } diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index c96791d07c..5bd95c86cd 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -66,12 +66,12 @@ func (base *baseMerkleCRDT) Value(ctx context.Context) ([]byte, error) { func InstanceWithStore( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, - ctype client.CType, + cType client.CType, kind client.FieldKind, key core.DataStoreKey, fieldName string, ) (MerkleCRDT, error) { - switch ctype { + switch cType { case client.LWW_REGISTER: return NewMerkleLWWRegister( store, @@ -79,21 +79,23 @@ func InstanceWithStore( key, fieldName, ), nil - case client.PN_COUNTER: + case client.PN_COUNTER, client.P_COUNTER: switch kind { case client.FieldKind_NILLABLE_INT: - return NewMerklePNCounter[int64]( + return NewMerkleCounter[int64]( store, schemaVersionKey, key, fieldName, + cType == client.PN_COUNTER, ), nil case client.FieldKind_NILLABLE_FLOAT: - return NewMerklePNCounter[float64]( + return NewMerkleCounter[float64]( store, schemaVersionKey, key, fieldName, + cType == client.PN_COUNTER, ), nil } case client.COMPOSITE: @@ -104,5 +106,5 @@ func InstanceWithStore( fieldName, ), nil } - return nil, client.NewErrUnknownCRDT(ctype) + return nil, client.NewErrUnknownCRDT(cType) } diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index 2273e3adb9..7865e204db 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -161,8 +161,20 @@ var ( Description: "Last Write Wins register", }, client.PN_COUNTER.String(): &gql.EnumValueConfig{ - Value: client.PN_COUNTER, - Description: "Positive-Negative Counter", + Value: client.PN_COUNTER, + Description: `Positive-Negative Counter. + +WARNING: Incrementing an integer and causing it to overflow the int64 max value +will cause the value to roll over to the int64 min value. Incrementing a float and +causing it to overflow the float64 max value will act like a no-op.`, + }, + client.P_COUNTER.String(): &gql.EnumValueConfig{ + Value: client.P_COUNTER, + Description: `Positive Counter. + +WARNING: Incrementing an integer and causing it to overflow the int64 max value +will cause the value to roll over to the int64 min value. Incrementing a float and +causing it to overflow the float64 max value will act like a no-op.`, }, }, }) diff --git a/tests/integration/mutation/create/crdt/pcounter_test.go b/tests/integration/mutation/create/crdt/pcounter_test.go new file mode 100644 index 0000000000..681ca2ec76 --- /dev/null +++ b/tests/integration/mutation/create/crdt/pcounter_test.go @@ -0,0 +1,57 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Document creation with P Counter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + points + } + }`, + Results: []map[string]any{ + { + "_docID": "bae-a688789e-d8a6-57a7-be09-22e005ab79e0", + "name": "John", + "points": int64(10), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/crdt/pcounter_test.go b/tests/integration/mutation/update/crdt/pcounter_test.go new file mode 100644 index 0000000000..c4ff85e8b4 --- /dev/null +++ b/tests/integration/mutation/update/crdt/pcounter_test.go @@ -0,0 +1,265 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPCounterUpdate_IntKindWithNegativeIncrement_ShouldError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": -10 + }`, + ExpectedError: "value cannot be negative", + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(0), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a P Counter with Int type. 
+func TestPCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt64(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Int type causing overflow behaviour", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a type error in this case + // because we are testing the internal overflow behaviour with + // a int64 but the GQL Int type is an int32. + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %d + }`, math.MaxInt64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(math.MinInt64), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Float type. Note the lack of precision", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.1 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.2 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + // Note the lack of precision of float types. + "points": 20.299999999999997, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a P Counter with Float type. +// In this case it is the same as a no-op. 
+func TestPCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a P Counter with Float type and overflow causing a no-op", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1000 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.MaxFloat64, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go index f8ede1cffc..fe350ab852 100644 --- a/tests/integration/mutation/update/crdt/pncounter_test.go +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -75,8 +75,9 @@ func TestPNCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt6 test := testUtils.TestCase{ Description: "Positive increments of a PN Counter with Int type causing overflow behaviour", SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - // GQL mutation will return an error - // when integer type overflows + // GQL mutation will return a type error in this case + // because we are testing the internal overflow behaviour with + // a int64 but the GQL Int type is an int32. testUtils.CollectionNamedMutationType, testUtils.CollectionSaveMutationType, }), diff --git a/tests/integration/net/state/simple/peer/crdt/pcounter_test.go b/tests/integration/net/state/simple/peer/crdt/pcounter_test.go new file mode 100644 index 0000000000..963b7d54cd --- /dev/null +++ b/tests/integration/net/state/simple/peer/crdt/pcounter_test.go @@ -0,0 +1,124 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package peer_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PUpdate_WithPCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // Create Shahzad on all nodes + Doc: `{ + "name": "Shahzad", + "points": 10 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPCounterSimultaneousUpdate_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: `{ + "Name": "John", + "Age": 0 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + "Age": int64(90), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go b/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go new file mode 100644 index 0000000000..a7b3c67a59 --- /dev/null +++ b/tests/integration/net/state/simple/peer_replicator/crdt/pcounter_test.go @@ -0,0 +1,160 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package peer_replicator_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PPeerReplicatorWithCreate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 2, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Shahzad", + "points": 3000 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(2), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PPeerReplicatorWithUpdate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 2, + }, + testUtils.UpdateDoc{ + // Update John's points on the first node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go b/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go new file mode 100644 index 0000000000..33ea5d136d --- /dev/null +++ b/tests/integration/net/state/simple/replicator/crdt/pcounter_test.go @@ -0,0 +1,71 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replicator + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2POneToOneReplicatorUpdate_PCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + // This document is created in first node before the replicator is set up. + // Updates should be synced across nodes. + NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // Update John's points on the first node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index fee91f7399..7c265a409c 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -395,3 +395,97 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. +func TestCidAndDocIDQuery_ContainsPCounterWithIntKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with first cid and docID with pcounter int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": 20 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeibinkgqwegghg7kqwk66etboc5jv42i4akasxrih35wrvykdwcima", + docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(10), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. 
+func TestCidAndDocIDQuery_ContainsPCounterWithFloatKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with first cid and docID with pcounter and float type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10.2 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": 20.6 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeifsok5oy42zs2p7habfjr3ee3j7mxeag5nfdo7u4d2bfvm6hdhnpq", + docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": 10.2, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go index 47388262d0..fdc278e52c 100644 --- a/tests/integration/schema/crdt_type_test.go +++ b/tests/integration/schema/crdt_type_test.go @@ -130,3 +130,98 @@ func TestSchemaCreate_ContainsPNCounterWithInvalidType_Error(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestSchemaCreate_ContainsPCounterTypeWithIntKind_NoError(t *testing.T) { + schemaVersionID := "bafkreidjvjnvtwwdkcdqwcmwxqzu3bxrbxs3rkn6h6h7kkxmibpli3mp7y" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + Kind: client.FieldKind_NILLABLE_INT, + Typ: client.P_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPCounterTypeWithFloatKind_NoError(t *testing.T) { + schemaVersionID := "bafkreiasm64v2oimv6uk3hlfap6awptumwkm4fxuoc3ck3ehfe2tmry66i" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Float @crdt(type: "pcounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + Kind: client.FieldKind_NILLABLE_FLOAT, + Typ: client.P_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPCounterTypeWithWrongKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: String @crdt(type: "pcounter") + } + `, + ExpectedError: "CRDT type pcounter can't be assigned to field kind String", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/crdt/pcounter_test.go b/tests/integration/schema/updates/add/field/crdt/pcounter_test.go new file mode 100644 index 0000000000..b7edfe7269 --- /dev/null +++ b/tests/integration/schema/updates/add/field/crdt/pcounter_test.go @@ -0,0 +1,73 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file 
licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crdt + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaUpdates_AddFieldCRDTPCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt P Counter (5)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Int", "Typ": 5} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdates_AddFieldCRDTPCounterWithMismatchKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt P Counter (5)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Boolean", "Typ": 5} } + ] + `, + ExpectedError: "CRDT type pcounter can't be assigned to field kind Boolean", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/crdt/pncounter_test.go b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go index 2664118c0f..e4be1c1df8 100644 --- a/tests/integration/schema/updates/add/field/crdt/pncounter_test.go +++ b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go @@ -30,7 +30,7 @@ func TestSchemaUpdates_AddFieldCRDTPNCounter_NoError(t *testing.T) { testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 4, "Typ": 4} } + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Int", "Typ": 4} } ] `, }, @@ -62,7 +62,7 @@ func TestSchemaUpdates_AddFieldCRDTPNCounterWithMismatchKind_Error(t *testing.T) testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ": 4} } + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Boolean", "Typ": 4} } ] `, ExpectedError: "CRDT type pncounter can't be assigned to field kind Boolean", From 2a630361e95373f437e1a54b3954c9f753511caa Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 8 Apr 2024 11:33:43 -0700 Subject: [PATCH 22/49] feat: Lens runtime config (#2497) ## Relevant issue(s) Resolves #2496 ## Description This PR adds a lens runtime option. It also includes some cleanup and additional tests. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
`make test`

Specify the platform(s) on which this was tested:

- MacOS
---
 db/config.go      | 73 +++++++++++++++++++++++++++++++++++++++++++++++
 db/config_test.go | 55 +++++++++++++++++++++++++++++++++++
 db/db.go          | 65 +++++++----------------------------------
 lens/registry.go  | 36 +++++++++++++----------
 4 files changed, 159 insertions(+), 70 deletions(-)
 create mode 100644 db/config.go
 create mode 100644 db/config_test.go

diff --git a/db/config.go b/db/config.go
new file mode 100644
index 0000000000..397956ed8b
--- /dev/null
+++ b/db/config.go
@@ -0,0 +1,73 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package db
+
+import (
+	"context"
+
+	"github.com/lens-vm/lens/host-go/engine/module"
+	"github.com/sourcenetwork/immutable"
+
+	"github.com/sourcenetwork/defradb/acp"
+	"github.com/sourcenetwork/defradb/events"
+)
+
+const (
+	defaultMaxTxnRetries  = 5
+	updateEventBufferSize = 100
+)
+
+// Option is a function that sets a config value on the db.
+type Option func(*db)
+
+// WithACP enables access control. If path is empty then acp runs in-memory.
+func WithACP(path string) Option {
+	return func(db *db) {
+		var acpLocal acp.ACPLocal
+		acpLocal.Init(context.Background(), path)
+		db.acp = immutable.Some[acp.ACP](&acpLocal)
+	}
+}
+
+// WithACPInMemory enables access control in-memory.
+func WithACPInMemory() Option { return WithACP("") }
+
+// WithUpdateEvents enables the update events channel.
+func WithUpdateEvents() Option {
+	return func(db *db) {
+		db.events = events.Events{
+			Updates: immutable.Some(events.New[events.Update](0, updateEventBufferSize)),
+		}
+	}
+}
+
+// WithMaxRetries sets the maximum number of retries per transaction.
+func WithMaxRetries(num int) Option {
+	return func(db *db) {
+		db.maxTxnRetries = immutable.Some(num)
+	}
+}
+
+// WithLensPoolSize sets the maximum number of cached migrations instances to preserve per schema version.
+//
+// Will default to `5` if not set.
+func WithLensPoolSize(size int) Option {
+	return func(db *db) {
+		db.lensPoolSize = immutable.Some(size)
+	}
+}
+
+// WithLensRuntime returns an option that sets the lens registry runtime.
+func WithLensRuntime(runtime module.Runtime) Option {
+	return func(db *db) {
+		db.lensRuntime = immutable.Some(runtime)
+	}
+}
diff --git a/db/config_test.go b/db/config_test.go
new file mode 100644
index 0000000000..02bd81a910
--- /dev/null
+++ b/db/config_test.go
@@ -0,0 +1,55 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package db + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/runtimes/wasmtime" + "github.com/stretchr/testify/assert" +) + +func TestWithACP(t *testing.T) { + d := &db{} + WithACP("test")(d) + assert.True(t, d.acp.HasValue()) +} + +func TestWithACPInMemory(t *testing.T) { + d := &db{} + WithACPInMemory()(d) + assert.True(t, d.acp.HasValue()) +} + +func TestWithUpdateEvents(t *testing.T) { + d := &db{} + WithUpdateEvents()(d) + assert.NotNil(t, d.events) +} + +func TestWithMaxRetries(t *testing.T) { + d := &db{} + WithMaxRetries(10)(d) + assert.True(t, d.maxTxnRetries.HasValue()) + assert.Equal(t, 10, d.maxTxnRetries.Value()) +} + +func TestWithLensPoolSize(t *testing.T) { + d := &db{} + WithLensPoolSize(10)(d) + assert.Equal(t, 10, d.lensPoolSize.Value()) +} + +func TestWithLensRuntime(t *testing.T) { + d := &db{} + WithLensRuntime(wasmtime.New())(d) + assert.NotNil(t, d.lensRuntime.Value()) +} diff --git a/db/db.go b/db/db.go index 1b02e1d0c1..239b26f9a7 100644 --- a/db/db.go +++ b/db/db.go @@ -22,6 +22,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" + "github.com/lens-vm/lens/host-go/engine/module" "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" @@ -44,11 +45,6 @@ var ( _ client.Collection = (*collection)(nil) ) -const ( - defaultMaxTxnRetries = 5 - updateEventBufferSize = 100 -) - // DB is the main interface for interacting with the // DefraDB storage system. type db struct { @@ -59,17 +55,19 @@ type db struct { events events.Events - parser core.Parser + parser core.Parser + + // The maximum number of cached migrations instances to preserve per schema version. + lensPoolSize immutable.Option[int] + lensRuntime immutable.Option[module.Runtime] + lensRegistry client.LensRegistry // The maximum number of retries per transaction. maxTxnRetries immutable.Option[int] - // The maximum number of cached migrations instances to preserve per schema version. - lensPoolSize immutable.Option[int] - // The options used to init the database - options any + options []Option // The ID of the last transaction created. previousTxnID atomic.Uint64 @@ -78,46 +76,6 @@ type db struct { acp immutable.Option[acp.ACP] } -// Functional option type. -type Option func(*db) - -// WithACP enables access control. If path is empty then acp runs in-memory. -func WithACP(path string) Option { - return func(db *db) { - var acpLocal acp.ACPLocal - acpLocal.Init(context.Background(), path) - db.acp = immutable.Some[acp.ACP](&acpLocal) - } -} - -// WithACPInMemory enables access control in-memory. -func WithACPInMemory() Option { return WithACP("") } - -// WithUpdateEvents enables the update events channel. -func WithUpdateEvents() Option { - return func(db *db) { - db.events = events.Events{ - Updates: immutable.Some(events.New[events.Update](0, updateEventBufferSize)), - } - } -} - -// WithMaxRetries sets the maximum number of retries per transaction. -func WithMaxRetries(num int) Option { - return func(db *db) { - db.maxTxnRetries = immutable.Some(num) - } -} - -// WithLensPoolSize sets the maximum number of cached migrations instances to preserve per schema version. -// -// Will default to `5` if not set. -func WithLensPoolSize(num int) Option { - return func(db *db) { - db.lensPoolSize = immutable.Some(num) - } -} - // NewDB creates a new instance of the DB using the given options. 
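 //
 // A minimal construction sketch (the `rootstore` value and the chosen options
 // below are illustrative assumptions, not requirements):
 //
 //	db, err := NewDB(ctx, rootstore, WithLensRuntime(wasmtime.New()), WithMaxRetries(3))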
 func NewDB(
 	ctx context.Context,
@@ -149,15 +107,12 @@ func newDB(
 
 	// apply options
 	for _, opt := range options {
-		if opt == nil {
-			continue
-		}
 		opt(db)
 	}
 
-	// lensPoolSize may be set by `options`, and because they are funcs on db
+	// lens options may be set by the `WithLens*` option funcs, and because they are funcs on db
 	// we have to mutate `db` here to set the registry.
-	db.lensRegistry = lens.NewRegistry(db.lensPoolSize, db)
+	db.lensRegistry = lens.NewRegistry(db, db.lensPoolSize, db.lensRuntime)
 
 	err = db.initialize(ctx)
 	if err != nil {
diff --git a/lens/registry.go b/lens/registry.go
index ba24779611..ede3b99bb2 100644
--- a/lens/registry.go
+++ b/lens/registry.go
@@ -84,24 +84,30 @@ const DefaultPoolSize int = 5
 
 // NewRegistry instantiates a new registry.
 //
 // It will be of size 5 (per schema version) if a size is not provided.
-func NewRegistry(lensPoolSize immutable.Option[int], db TxnSource) client.LensRegistry {
-	var size int
-	if lensPoolSize.HasValue() {
-		size = lensPoolSize.Value()
-	} else {
-		size = DefaultPoolSize
+func NewRegistry(
+	db TxnSource,
+	poolSize immutable.Option[int],
+	runtime immutable.Option[module.Runtime],
+) client.LensRegistry {
+	registry := &lensRegistry{
+		poolSize:                    DefaultPoolSize,
+		runtime:                     wasmtime.New(),
+		modulesByPath:               map[string]module.Module{},
+		lensPoolsByCollectionID:     map[uint32]*lensPool{},
+		reversedPoolsByCollectionID: map[uint32]*lensPool{},
+		txnCtxs:                     map[uint64]*txnContext{},
+	}
+
+	if poolSize.HasValue() {
+		registry.poolSize = poolSize.Value()
+	}
+	if runtime.HasValue() {
+		registry.runtime = runtime.Value()
 	}
 
 	return &implicitTxnLensRegistry{
-		db: db,
-		registry: &lensRegistry{
-			poolSize:                    size,
-			runtime:                     wasmtime.New(),
-			modulesByPath:               map[string]module.Module{},
-			lensPoolsByCollectionID:     map[uint32]*lensPool{},
-			reversedPoolsByCollectionID: map[uint32]*lensPool{},
-			txnCtxs:                     map[uint64]*txnContext{},
-		},
+		db:       db,
+		registry: registry,
 	}
 }

From 15a9b419262b5668f5b30c7bca33883d57a7a314 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 10 Apr 2024 13:26:42 -0400
Subject: [PATCH 23/49] bot: Update dependencies (bulk dependabot PRs) 2024-04-09 (#2509)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

✅ This PR was created by combining the following PRs:
#2506 bot: Bump @types/react-dom from 18.2.23 to 18.2.24 in /playground
#2505 bot: Bump swagger-ui-react from 5.12.0 to 5.14.0 in /playground
#2504 bot: Bump eslint-plugin-react-refresh from 0.4.5 to 0.4.6 in /playground
#2502 bot: Bump graphiql from 3.1.1 to 3.2.0 in /playground
#2501 bot: Bump github.com/getkin/kin-openapi from 0.123.0 to 0.124.0
#2500 bot: Bump go.opentelemetry.io/otel/metric from 1.24.0 to 1.25.0
#2498 bot: Bump github.com/cometbft/cometbft from 0.38.5 to 0.38.6
#2485 bot: Bump github.com/btcsuite/btcd from 0.22.1 to 0.23.2

⚠️ The following PRs were left out due to merge conflicts:
#2503 bot: Bump typescript from 5.4.3 to 5.4.4 in /playground
#2499 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.24.0 to 1.25.0

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Fred Carle
---
 go.mod                       |  10 +-
 go.sum                       |  20 +-
 playground/package-lock.json | 580 ++++++++++++++++++-----------------
 playground/package.json      |   8 +-
 4 files changed, 315 insertions(+), 303 deletions(-)

diff --git a/go.mod b/go.mod
index ff973e637a..e22cbe9f3c 100644
--- a/go.mod
+++ b/go.mod
@@ -8,7
+8,7 @@ require ( github.com/cosmos/gogoproto v1.4.11 github.com/evanphx/json-patch/v5 v5.9.0 github.com/fxamacker/cbor/v2 v2.6.0 - github.com/getkin/kin-openapi v0.123.0 + github.com/getkin/kin-openapi v0.124.0 github.com/go-chi/chi/v5 v5.0.12 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 @@ -44,7 +44,7 @@ require ( github.com/ugorji/go/codec v1.2.12 github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 - go.opentelemetry.io/otel/metric v1.24.0 + go.opentelemetry.io/otel/metric v1.25.0 go.opentelemetry.io/otel/sdk/metric v1.24.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 @@ -86,7 +86,7 @@ require ( github.com/cockroachdb/pebble v1.1.0 // indirect github.com/cockroachdb/redact v1.1.5 // indirect github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect - github.com/cometbft/cometbft v0.38.5 // indirect + github.com/cometbft/cometbft v0.38.6 // indirect github.com/cometbft/cometbft-db v0.9.1 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect @@ -282,9 +282,9 @@ require ( github.com/zondax/ledger-go v0.14.3 // indirect go.etcd.io/bbolt v1.3.8 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.24.0 // indirect + go.opentelemetry.io/otel v1.25.0 // indirect go.opentelemetry.io/otel/sdk v1.24.0 // indirect - go.opentelemetry.io/otel/trace v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.25.0 // indirect go.uber.org/dig v1.17.1 // indirect go.uber.org/fx v1.20.1 // indirect go.uber.org/mock v0.4.0 // indirect diff --git a/go.sum b/go.sum index a597ef0dff..c4e7132f38 100644 --- a/go.sum +++ b/go.sum @@ -168,8 +168,8 @@ github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZ github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/cometbft/cometbft v0.38.5 h1:4lOcK5VTPrfbLOhNHmPYe6c7eDXHtBdMCQuKbAfFJdU= -github.com/cometbft/cometbft v0.38.5/go.mod h1:0tqKin+KQs8zDwzYD8rPHzSBIDNPuB4NrwwGDNb/hUg= +github.com/cometbft/cometbft v0.38.6 h1:QSgpCzrGWJ2KUq1qpw+FCfASRpE27T6LQbfEHscdyOk= +github.com/cometbft/cometbft v0.38.6/go.mod h1:8rSPxzUJYquCN8uuBgbUHOMg2KAwvr7CyUw+6ukO4nw= github.com/cometbft/cometbft-db v0.9.1 h1:MIhVX5ja5bXNHF8EYrThkG9F7r9kSfv8BX4LWaxWJ4M= github.com/cometbft/cometbft-db v0.9.1/go.mod h1:iliyWaoV0mRwBJoizElCwwRA9Tf7jZJOURcRZF9m60U= github.com/containerd/cgroups v0.0.0-20201119153540-4cbc285b3327/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= @@ -308,8 +308,8 @@ github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nos github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fxamacker/cbor/v2 v2.6.0 h1:sU6J2usfADwWlYDAFhZBQ6TnLFBHxgesMrQfQgk1tWA= github.com/fxamacker/cbor/v2 v2.6.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8= -github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= +github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M= +github.com/getkin/kin-openapi v0.124.0/go.mod 
h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps= github.com/getsentry/sentry-go v0.27.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -1183,16 +1183,16 @@ go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= -go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= -go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= -go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k= +go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg= +go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA= +go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s= go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/sdk/metric v1.24.0 h1:yyMQrPzF+k88/DbH7o4FMAs80puqd+9osbiBrJrz/w8= go.opentelemetry.io/otel/sdk/metric v1.24.0/go.mod h1:I6Y5FjH6rvEnTTAYQz3Mmv2kl6Ek5IIrmwTLqMrrOE0= -go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= -go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM= +go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= diff --git a/playground/package-lock.json b/playground/package-lock.json index 03f61a4776..054f60868e 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -8,22 +8,22 @@ "name": "playground", "version": "0.0.0", "dependencies": { - "graphiql": "^3.1.1", + "graphiql": "^3.2.0", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.12.0" + "swagger-ui-react": "^5.14.0" }, "devDependencies": { "@types/react": "^18.2.74", - "@types/react-dom": "^18.2.23", + "@types/react-dom": "^18.2.24", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.5.0", "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.5", + "eslint-plugin-react-refresh": "^0.4.6", "typescript": "^5.4.3", "vite": "^5.2.8" } @@ -49,9 +49,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.0.tgz", - "integrity": "sha512-HxiRMOncx3ly6f3fcZ1GVKf+/EROcI9qwPgmij8Czqy6Okm/0T37T4y2ZIlLUuEUFjtM7NRsfdCO8Y3tAiJZew==", 
+ "version": "7.24.4", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.4.tgz", + "integrity": "sha512-VOQOexSilscN24VEY810G/PqtpFvx/z6UqDIjIWbDe2368HhDLkYN5TYwaEz/+eRCUkhJ2WaNLLmQAlxzfWj4w==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -61,9 +61,9 @@ } }, "node_modules/@braintree/sanitize-url": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.0.tgz", - "integrity": "sha512-GMu2OJiTd1HSe74bbJYQnVvELANpYiGFZELyyTM1CR0sdv5ReQAcJ/c/8pIrPab3lO11+D+EpuGLUxqz+y832g==" + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.1.tgz", + "integrity": "sha512-URg8UM6lfC9ZYqFipItRSxYJdgpU5d2Z4KnjsJ+rj6tgAmGme7E+PQNCiud8g0HDaZKMovu2qjfa0f5Ge0Vlsg==" }, "node_modules/@codemirror/language": { "version": "6.0.0", @@ -86,9 +86,9 @@ "peer": true }, "node_modules/@codemirror/view": { - "version": "6.25.1", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.25.1.tgz", - "integrity": "sha512-2LXLxsQnHDdfGzDvjzAwZh2ZviNJm7im6tGpa0IONIDnFd8RZ80D2SNi8PDi6YjKcMoMRK20v6OmKIdsrwsyoQ==", + "version": "6.26.1", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.1.tgz", + "integrity": "sha512-wLw0t3R9AwOSQThdZ5Onw8QQtem5asE7+bPlnzc57eubPqiuJKIzwjMZ+C42vQett+iva+J8VgFV4RYWDBh5FA==", "peer": true, "dependencies": { "@codemirror/state": "^6.4.0", @@ -592,9 +592,9 @@ "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" }, "node_modules/@graphiql/react": { - "version": "0.20.3", - "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.20.3.tgz", - "integrity": "sha512-LHEiWQPABflTyRJZBZB50WSlrWER4RtlWg9XV1+D4yZQ3+6GbLM7X1zYf4D/TQ6AJB/vLZQHEnbhS0LuKcNqfA==", + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.21.0.tgz", + "integrity": "sha512-UlXzG78HC5+CgQYXw0jVZPoZX0Uh2jPIrqLBIxAdAWMZsmcHMZHAujZtION1pbIrv22cWxP95W+8RpDIHijYow==", "dependencies": { "@graphiql/toolkit": "^0.9.1", "@headlessui/react": "^1.7.15", @@ -605,11 +605,11 @@ "@types/codemirror": "^5.60.8", "clsx": "^1.2.1", "codemirror": "^5.65.3", - "codemirror-graphql": "^2.0.10", + "codemirror-graphql": "^2.0.11", "copy-to-clipboard": "^3.2.0", "framer-motion": "^6.5.1", "graphql-language-service": "^5.2.0", - "markdown-it": "^12.2.0", + "markdown-it": "^14.1.0", "set-value": "^4.1.0" }, "peerDependencies": { @@ -1650,12 +1650,12 @@ ] }, "node_modules/@swagger-api/apidom-ast": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.97.0.tgz", - "integrity": "sha512-KpPyC8x5ZrB4l9+jgl8FAhokedh+8b5VuBTTdTJKFf+x5uznMiBf/MZTWgvsIk8/9MtjkQYUN1qgVzEPiKWvHg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.99.1.tgz", + "integrity": "sha512-evkKm2JaqNfg3dB2Yk3FWL/Qy2r4csZLMZ9bHMG+xNpti8ulENHMjuCh3Ry4koV1gD7IA54CU2ZjcaTvqJa22Q==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1663,13 +1663,13 @@ } }, "node_modules/@swagger-api/apidom-core": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.97.0.tgz", - "integrity": "sha512-3LYlN0Cox0FBFNZqmgi7VyJ4MXppCmZoFjlurT+Y90ND1y2lCidcwjAthr3QpV8b+UCc7MG3APBGRfwqaYZ2IA==", + 
"version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.99.1.tgz", + "integrity": "sha512-oWU9Re2B7hPFAnm4ymN2HNOqevMqZsvL4Fjud2qN+KFWNvZ1/r8kwQaj0Pba5Kwka2bcWo0aEfWNayP4axTB+Q==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "minim": "~0.23.8", "ramda": "~0.29.1", @@ -1679,36 +1679,36 @@ } }, "node_modules/@swagger-api/apidom-error": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.97.0.tgz", - "integrity": "sha512-Y2YRnsJSXp+MdgwwMSCtidzJfy/bL6CZEpc+5aWUw1mphTjfLZC66uA4btUgUevyiT6mNHXm8tUmGomHA7Izdw==", + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.99.0.tgz", + "integrity": "sha512-ZdFdn+GeIo23X2GKFrfH4Y5KY8yTzVF1l/Mqjs8+nD30LTbYg6f3ITHn429dk8fDT3NT69fG+gGm60FAFaKkeQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7" } }, "node_modules/@swagger-api/apidom-json-pointer": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.97.0.tgz", - "integrity": "sha512-9vcgePgcYXUiYEqnvx8Ew04j8JtfenosysbSuGgRs93Ls8mQ/+ndIOklHaXJzNjBZZxqxS0p6QLFcj1jpUiojQ==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.99.1.tgz", + "integrity": "sha512-4fOOKTLoBWpfX2eGNx93sqBsS1KRCtBFOq75n1jMcRbs1rrj+JxcaiTFUE+6BZqIqBsCqTmRMYE/HsgwBS3vhQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-ns-api-design-systems": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.97.0.tgz", - "integrity": "sha512-uSTIEX4q9XWoP9TQq9nEtW5xG3hVQN2VD5spYoxvYlzUOtg12yxkVgu776eq0kVZd74acZhKIF7mn3uiqaQcHA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.99.1.tgz", + "integrity": "sha512-LID3n+Y2eKBzaR7oYShto48+EFPBLZLuKIJdEZ53is6SqD5jHS0Ev6xLj2QfqSIQR3OoVN3PUOrz724Jkpiv/A==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1716,14 +1716,14 @@ } }, "node_modules/@swagger-api/apidom-ns-asyncapi-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.97.0.tgz", - "integrity": "sha512-buEQSrXdtjoAkqIWSZ448HlvnareupthIoObYELp25LVuQwhxxVSY3NR0aCIR37GHgSchrmPBVcsvPMtXV96BA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.99.1.tgz", + "integrity": "sha512-fAUsKbg0MuvEPjE2UWQu+62K0eh/3yTE2M5u/QCqpj48IpByMNYLKU9ICfMMAzBjXNQAVuEr07/UgY9CRHUVhA==", "optional": 
true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-7": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-json-schema-draft-7": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1731,13 +1731,13 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-4": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.97.0.tgz", - "integrity": "sha512-eBMIPxX4huNDGle6TOfSe1kKS1/HvL6w66GWWLFxZW2doCQHMADgjo7j/kVowrXiJtEoMgjBVp3W30WkcwBVug==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.99.1.tgz", + "integrity": "sha512-HdxD4WXnaMJsdodrWoynzgteg9UDaZsVkX04oObQPR3C1ZWW9KahEGBSbtr/oBhnE/QgiPfNHUDWrQvk3oC6lg==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1745,15 +1745,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-6": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.97.0.tgz", - "integrity": "sha512-tRbg3/b4aJGfcODc0HDngZDjBdhPAv8OZM1OZdsqI4EEIw3PI/wpd+b6b8a5udOjAdbUYqnYsq6gCylCDNBnzw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.99.1.tgz", + "integrity": "sha512-O6A25j9y+Hjvwwq8x+uTaIhK4tp0CqO6YrFRXmfmOnkBtJ6Q66jqbvRzIN9XQfW8VaIipqAlOin++ufsfuDd1g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1761,15 +1761,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-7": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.97.0.tgz", - "integrity": "sha512-0GITsoa6kVVkoKBUxyeODmh6vjGXuvDQZd3Vxs1nz0c/O6ZR+VBfBB3JW5wzhVr+WCXebaOJGDyWkxJMHKycxw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.99.1.tgz", + "integrity": "sha512-I4IpTkAlParfUWOi5kJU7jQqeMKy39JOWiRz8jTyPoZ8vvixVgyIlOS7/bj5uLxbBw3QxOFXPuIqUvK1uFElAg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-6": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-6": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1777,15 +1777,15 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-2": { - "version": "0.97.0", - "resolved": 
"https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.97.0.tgz", - "integrity": "sha512-5gOA9FiO1J9OxJhcVBeXdm77kuh2cwPXG6Sh/DOlbk733Pz9v9W0aQgpLi5Ltsgagxe1sHhBqxJ1asw10QFzzw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.99.1.tgz", + "integrity": "sha512-ChEd1RaJKrYskLTmlH8NL9tNpAgroSPklTwJCvHmZjzaWvW7N/B2diHBOaz+rnVLiW9Hb7QOlR/biEXJn7OUIg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1793,14 +1793,14 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.97.0.tgz", - "integrity": "sha512-fbnN87SF0WN/4DcSpceuo+NUtkAGeicMIucEMF+LIIiCAF27Xi5d6Q823i9DgOEfJtifHKVj6Zhl/zSKAD2eyw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.99.1.tgz", + "integrity": "sha512-9lfa2a+4rLp+1loEXrr+Dq3whdBwBWHukctsX/C/cGr4SG0NO8+tmS3FLsOD+ly6O/YPdszPDxVcIqqNV8J2uA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1808,14 +1808,14 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.97.0.tgz", - "integrity": "sha512-DyvkTim+t7iVKyze6N3tITsfyElthmOwOcxwOjKj/3lySEy61DuY4X2FaPD5+owftVDxMs4Q6F9Chm7qv91a+Q==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.99.1.tgz", + "integrity": "sha512-XsRxM9WC+WywBo+rr/YUayQRsV2mN8AzBxVlKzJoZ+pBgmPYe24n3Ma/0FTr8zGwQyg4DtOBwydlYz8QFrLPFA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1823,14 +1823,14 @@ } }, "node_modules/@swagger-api/apidom-ns-workflows-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-0.97.0.tgz", - "integrity": "sha512-eIuoTRSITlUtMjpM3J0H9b2rVeEVu13i/Fv6+ZMPob0yHmQBWo9bnLjxxnfEZkpvp050worKULfNMdJV8NKBkA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-0.99.1.tgz", + "integrity": "sha512-s6SmFzlBmKKRdlyLdZsjXHYJ+7+AuDyK3qrBAPHX7mDe/uN6D7QPGD05oCzHytPhbeZQPMf0wi9vPUrM1s1xvw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": 
"^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1838,75 +1838,75 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.97.0.tgz", - "integrity": "sha512-ZDzaiTHMEpz0kM0/iyHEjySTf0xoLKDJwJiSxKNuew141k0rakTVeVisxXeq+6JQi2eC6KuyS98DHMe7hEIVUw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.99.1.tgz", + "integrity": "sha512-ONeGsOZPZ16SvYbfHKiLjg8IeKGg+nJC+fOIqnelGdMCu/34ed0X7k6XQZGrwbDtmSd3SkXykL3F55H5BFiUPQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-api-design-systems": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.97.0.tgz", - "integrity": "sha512-5/BziPWqrHLr91VR+EC4pXt/fNToWMmvG+d7RVjksHinrjps2E6HA+oZOhqKqA2LRCLNjGhNUptXzRMDjjtenw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.99.1.tgz", + "integrity": "sha512-mVOHebofGhI3E8HW/7YsqGOpIWOBSMc5R5aQFMYMYpTxrpDHNhyEfFEWqZRAoC2Hin9NZ2BeI/hsrXGIw/LoeQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-api-design-systems": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.97.0.tgz", - "integrity": "sha512-XLD/YZifnhezRQY5ADQQAje5G5qtZ4GAbXk//1sRNe3R/qCk1pDxmRYr27yzt8w1XhfM+9VQmCTI21ZFpNFQOA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.99.1.tgz", + "integrity": "sha512-2kKVf5ecTuDirPpk8nDRyTrT0tkrWjdaUPwJ/+l2RdgWYObNVwdX2lAS9URC4zK/drdQOQxjetF+aDQBBhXmXA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": 
"~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.97.0.tgz", - "integrity": "sha512-whyThDiGN4FoNirgY0XtXF7IJeU6NfsrBwjaxCkYBuSPslZBoWy4ojEQbfg+2HqNLbnHKJyvabh9/tSIxgB92A==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.99.1.tgz", + "integrity": "sha512-UX+rLOUSQuWe5yNXS8eLFvDhCA1CP5r80jLtvT3n0FDnss4+9WkPlqgj4UPH4XoitXSvBVOZxbdjNwfKtJzsHA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-json": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.97.0.tgz", - "integrity": "sha512-MPhAX77Z9Csti+Kljtbrl/ez2H610R4fQg0RnkNW40f4e6TXeOogT5tmceeWP+IKGAKX45HA1JpVPxdtSJn3ww==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.99.1.tgz", + "integrity": "sha512-qVeSdhaDIggIkFtMI4aqqv4MYuJlRQ6pniP+Li+DjcHeTKYHelX0OwoznaTlLlZ1tM9QFaMi8rw8xfGp6vMHgg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1916,135 +1916,135 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.97.0.tgz", - "integrity": "sha512-HtaoRN7wnVB2ilxs/RpLBR7+MwIfUqUcdCzC/EVV788CnSbutwj61W3jR2w9BRXeANJ4K2APcvU4W7WiI9Sugg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.99.1.tgz", + "integrity": "sha512-aHzdast9HMeGTaTUWwVovMcspEVCAdvBJe47BzMZfzcVOnZlAVyTmLqxQ/3s9fjseRrPhFYqKtCOKROzbWeAhg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.97.0.tgz", - "integrity": 
"sha512-psfxh7k671HukibaY53cems0fcsLQP8U5lQPzVDevEGJQoguAWHyV2C5kOr52XOJInmsN5E+COEn6oPzsIaDCg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.99.1.tgz", + "integrity": "sha512-l/nYccP87GL611W9OCiYWUOizhhoGenuKa7Ocmaf9Rg+xIDnPw29+9p/SuGEN2jjtql0iYuNI4+ZzwiC2+teSg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.97.0.tgz", - "integrity": "sha512-PJpcLhS441ATFjbCHHhVUPd8K1JZaiFQJS7yfQEKQmA5MlBRh3w7mqCJAbZN49wuMkelTdB8qJJlVEGUDSxX5Q==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.99.1.tgz", + "integrity": "sha512-Eie4ztKR5hgrGESBDHB9xIODTB/gvjWBwPNveZ/iSlJ/yhZGyDMC8dgv0aQiyFP01mKaaBMhyZjWgsvts9l+cQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.97.0.tgz", - "integrity": "sha512-X5saN/AElpS+LohbSjNPesUPWYOM8Wb19+OD7/WS1r6AVRIlj5gKLy3vO7BLBvaER5G73qYylfrPxCoUPlpZZg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.99.1.tgz", + "integrity": "sha512-MzjUyhGmJ+jQly90Nak7s01x2Jp1GvBe+Z8BXwkArNOFjLvzQIjdAx7F943/VlLaV9y71DNXVsqhgKdiqjnX3w==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.97.0.tgz", - "integrity": "sha512-kBW6atIN0rONf9kjNeE5eHkxb3amfby0vxKfk+9fiRdQbJVCg4UiWOFmU5rD9bc2smtLWSQNkjlMkKS3i2/4Wg==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.99.1.tgz", + "integrity": 
"sha512-TF/yquy1Alce/olQzR5AnjnOx7o7q8MkXMi0JxrtqvMk9Ky//0qFxFGzFQEzA++NaSGt9StG0Pcgp4MGZAzJYg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.97.0.tgz", - "integrity": "sha512-cclRwQ9IQj6sFLUCDzqRbbbplQfKdt9xz8YONvtq4XBHZO6Ab8z5CF3A9eLiuW1TJZ3y0QU7xmI6h5jWwUrC9w==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.99.1.tgz", + "integrity": "sha512-baXbKqjnbmgEmFgCVHlDEiFANHs5lHnnBM0X3k5kNtAVule6Lc5lAZVoySpTGyBJ+4nq4RHNJfbKW8RDHgVMoQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-workflows-json-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-0.97.0.tgz", - "integrity": "sha512-UvnISzq5JDG43sTIJ2oE8u8qALHmBKbYMGncYgUdlHx7z5RgPAWxIRDWH40YFzUSuKSRNp4TI7eG/9MUd3RnGA==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-0.99.1.tgz", + "integrity": "sha512-Uu8SaQfl2XiiXDQVRUvUCu3yk7jwHVmwKOoacbJGzPducrR/7/bOe8dNeN4CMRw7HKeRbh02UxXtR46mgBPnog==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-workflows-yaml-1": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-0.97.0.tgz", - "integrity": "sha512-TTZS0YkFvy0X8Huom+fr3muZsCy8mtDpuUks45EvPqv6gjGLCBw3/AZ507CS0YxYvoERbXkYfAYqxW8lptwKuQ==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-0.99.1.tgz", + "integrity": "sha512-9DX9X9wxW6TJF5lG0k/w0GxeMPkHACwEQx/QFJqg1YRD3/UWSkBcm567KbfCh5BiDx5p5WAYhTGInQEAF3d0zQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - 
"@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": { - "version": "0.97.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.97.0.tgz", - "integrity": "sha512-3f1ADjQyKyLnuRhPuoHMgWMW28o0ylohWCQwX4q69CMH0kqGxP7HnqIU/i0I2cxZdjGv72OCdiKwaR/OgHcmEw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.99.1.tgz", + "integrity": "sha512-MmTDUkrvFIg2AwzaZmiqBifWpoECh7AKeJcAD8Tm+G2/FUmGr3mIr7elc4ehYt/fecSSJEwFGNFU/radKqT/6g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.97.0", - "@swagger-api/apidom-core": "^0.97.0", - "@swagger-api/apidom-error": "^0.97.0", + "@swagger-api/apidom-ast": "^0.99.1", + "@swagger-api/apidom-core": "^0.99.1", + "@swagger-api/apidom-error": "^0.99.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -2054,12 +2054,12 @@ } }, "node_modules/@swagger-api/apidom-reference": { - "version": "0.97.1", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.97.1.tgz", - "integrity": "sha512-Bs1U2VutmVpqbCxbCt4DTiL8v0s6osAJx+4v49BGrTcfFFh97K/EOAm48WgA8ViP7qHUNBhUF83rjbpEwOshFw==", + "version": "0.99.1", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.99.1.tgz", + "integrity": "sha512-g7xp+ZL/iRX6CEwdUnqqsLfZmaSRlXwEZV8LF1k4k13/o7Qcf7bsPv0fOVGa8ZC29zM8k//FVavwWoXvT2xrFQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.97.0", + "@swagger-api/apidom-core": "^0.99.1", "@types/ramda": "~0.29.6", "axios": "^1.4.0", "minimatch": "^7.4.3", @@ -2069,27 +2069,27 @@ "stampit": "^4.3.2" }, "optionalDependencies": { - "@swagger-api/apidom-error": "^0.97.0", - "@swagger-api/apidom-json-pointer": "^0.97.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.97.0", - "@swagger-api/apidom-ns-openapi-2": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.97.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.97.0", - "@swagger-api/apidom-ns-workflows-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.97.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.97.0", - "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-json": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.97.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-workflows-json-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^0.97.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.97.0" + "@swagger-api/apidom-error": "^0.99.0", + "@swagger-api/apidom-json-pointer": "^0.99.1", + "@swagger-api/apidom-ns-asyncapi-2": "^0.99.1", + 
"@swagger-api/apidom-ns-openapi-2": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-0": "^0.99.1", + "@swagger-api/apidom-ns-openapi-3-1": "^0.99.1", + "@swagger-api/apidom-ns-workflows-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.99.1", + "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.99.1", + "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-json": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.99.1", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-workflows-json-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^0.99.1", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.99.1" } }, "node_modules/@swagger-api/apidom-reference/node_modules/minimatch": { @@ -2317,11 +2317,11 @@ "dev": true }, "node_modules/@tanstack/react-virtual": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.1.3.tgz", - "integrity": "sha512-YCzcbF/Ws/uZ0q3Z6fagH+JVhx4JLvbSflgldMgLsuvB8aXjZLLb3HvrEVxY480F9wFlBiXlvQxOyXb5ENPrNA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.2.0.tgz", + "integrity": "sha512-OEdMByf2hEfDa6XDbGlZN8qO6bTjlNKqjM3im9JG+u3mCL8jALy0T/67oDI001raUUPh1Bdmfn4ZvPOV5knpcg==", "dependencies": { - "@tanstack/virtual-core": "3.1.3" + "@tanstack/virtual-core": "3.2.0" }, "funding": { "type": "github", @@ -2333,9 +2333,9 @@ } }, "node_modules/@tanstack/virtual-core": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.1.3.tgz", - "integrity": "sha512-Y5B4EYyv1j9V8LzeAoOVeTg0LI7Fo5InYKgAjkY1Pu9GjtUwX/EKxNcU7ng3sKr99WEf+bPTcktAeybyMOYo+g==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.2.0.tgz", + "integrity": "sha512-P5XgYoAw/vfW65byBbJQCw+cagdXDT/qH6wmABiLt4v4YBT2q2vqCOhihe+D1Nt325F/S/0Tkv6C5z0Lv+VBQQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -2375,11 +2375,11 @@ "devOptional": true }, "node_modules/@types/ramda": { - "version": "0.29.11", - "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.11.tgz", - "integrity": "sha512-jm1+PmNOpE7aPS+mMcuB4a72VkCXUJqPSaQRu2YqR8MbsFfaowYXgKxc7bluYdDpRHNXT5Z+xu+Lgr3/ml6wSA==", + "version": "0.29.12", + "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.12.tgz", + "integrity": "sha512-sgIEjpJhdQPB52gDF4aphs9nl0xe54CR22DPdWqT8gQHjZYmVApgA0R3/CpMbl0Y8az2TEZrPNL2zy0EvjbkLA==", "dependencies": { - "types-ramda": "^0.29.9" + "types-ramda": "^0.29.10" } }, "node_modules/@types/react": { @@ -2393,9 +2393,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.23", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.23.tgz", - "integrity": "sha512-ZQ71wgGOTmDYpnav2knkjr3qXdAFu0vsk8Ci5w3pGAIdj7/kKAyn+VsQDhXsmzzzepAiI9leWMmubXz690AI/A==", + "version": "18.2.24", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.24.tgz", + "integrity": 
"sha512-cN6upcKd8zkGy4HU9F1+/s98Hrp6D4MOcippK4PoE8OZRngohHZpbJn1GsaDLz87MqvHNoT13nHvNqM9ocRHZg==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -2788,9 +2788,9 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/aria-hidden": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz", - "integrity": "sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", + "integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", "dependencies": { "tslib": "^2.0.0" }, @@ -2829,11 +2829,11 @@ } }, "node_modules/axios": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", - "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", + "integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -3029,9 +3029,9 @@ "integrity": "sha512-br21LjYmSlVL0vFCPWPfhzUCT34FM/pAdK7rRIZwa0rrtrIdotvP4Oh4GUHsu2E3IrQMCfRkL/fN3ytMNxVQvg==" }, "node_modules/codemirror-graphql": { - "version": "2.0.10", - "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.10.tgz", - "integrity": "sha512-rC9NxibCsSzWtCQjHLfwKCkyYdGv2BT/BCgyDoKPrc/e7aGiyLyeT0fB60d+0imwlvhX3lIHncl6JMz2YxQ/jg==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.11.tgz", + "integrity": "sha512-j1QDDXKVkpin2VsyS0ke2nAhKal6/N1UJtgnBGrPe3gj9ZSP6/K8Xytft94k0xW6giIU/JhZjvW0GwwERNzbFA==", "dependencies": { "@types/codemirror": "^0.0.90", "graphql-language-service": "5.2.0" @@ -3108,9 +3108,9 @@ } }, "node_modules/core-js-pure": { - "version": "3.36.0", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.36.0.tgz", - "integrity": "sha512-cN28qmhRNgbMZZMc/RFu5w8pK9VJzpb2rJVR/lHuZJKwmXnoWOpXmMkxqBB514igkp1Hu8WGROsiOAzUcKdHOQ==", + "version": "3.36.1", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.36.1.tgz", + "integrity": "sha512-NXCvHvSVYSrewP0L5OhltzXeWFJLo2AL2TYnj6iLV3Bw8mM62wAQMNgUCRI6EBu6hVVpbCxmOPlxh1Ikw2PfUA==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -3220,9 +3220,9 @@ } }, "node_modules/detect-libc": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz", - "integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", "optional": true, "engines": { "node": ">=8" @@ -3258,9 +3258,9 @@ } }, "node_modules/dompurify": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.9.tgz", - "integrity": "sha512-uyb4NDIvQ3hRn6NiC+SIFaP4mJ/MdXlvtunaqK9Bn6dD3RuB/1S/gasEjDHD8eiaqdSael2vBv+hOs7Y+jhYOQ==" + "version": "3.0.11", + "resolved": 
"https://registry.npmjs.org/dompurify/-/dompurify-3.0.11.tgz", + "integrity": "sha512-Fan4uMuyB26gFV3ovPoEoQbxRRPfTu3CvImyZnhGq5fsIEO+gEFLp45ISFt+kQBWsK5ulDdT0oV28jS1UrwQLg==" }, "node_modules/drange": { "version": "1.1.1", @@ -3280,9 +3280,12 @@ } }, "node_modules/entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } @@ -3424,9 +3427,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.5.tgz", - "integrity": "sha512-D53FYKJa+fDmZMtriODxvhwrO+IOqrxoEo21gMA0sjHdU6dPVH4OhyFip9ypl8HOF5RV5KdTo+rBQLvnY2cO8w==", + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.6.tgz", + "integrity": "sha512-NjGXdm7zgcKRkKMua34qVO9doI7VOxZ6ancSvBELJSSoX97jyndXcSoa8XBh69JoB31dNz3EEzlMcizZl7LaMA==", "dev": true, "peerDependencies": { "eslint": ">=7" @@ -3945,14 +3948,14 @@ "dev": true }, "node_modules/graphiql": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.1.1.tgz", - "integrity": "sha512-FMNa981Wj8JBJJRTdryNyrVteigS8B7q+Q1fh1rW4IsFPaXNIs1VMs8kwqIZ8zERj4Fc64Ea750g3n6r2w9Zcg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.2.0.tgz", + "integrity": "sha512-HHZ9j47IVUdUhdEdOkwD/U3kMGxCGZocEf9rk1aou5lInK9vJRbjlDW4BbG9CvA5fNoe7DevRr72tv0ubvjjPA==", "dependencies": { - "@graphiql/react": "^0.20.3", + "@graphiql/react": "^0.21.0", "@graphiql/toolkit": "^0.9.1", "graphql-language-service": "^5.2.0", - "markdown-it": "^12.2.0" + "markdown-it": "^14.1.0" }, "peerDependencies": { "graphql": "^15.5.0 || ^16.0.0", @@ -4408,11 +4411,11 @@ } }, "node_modules/linkify-it": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", - "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", "dependencies": { - "uc.micro": "^1.0.1" + "uc.micro": "^2.0.0" } }, "node_modules/locate-path": { @@ -4482,24 +4485,25 @@ } }, "node_modules/markdown-it": { - "version": "12.3.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", - "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", "dependencies": { "argparse": "^2.0.1", - "entities": "~2.1.0", - "linkify-it": "^3.0.1", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" }, "bin": { - "markdown-it": 
"bin/markdown-it.js" + "markdown-it": "bin/markdown-it.mjs" } }, "node_modules/mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" }, "node_modules/merge2": { "version": "1.4.1", @@ -4652,9 +4656,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.56.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.56.0.tgz", - "integrity": "sha512-fZjdhDOeRcaS+rcpve7XuwHBmktS1nS1gzgghwKUQQ8nTy2FdSDr6ZT8k6YhvlJeHmmQMYiT/IH9hfco5zeW2Q==", + "version": "3.57.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.57.0.tgz", + "integrity": "sha512-Dp+A9JWxRaKuHP35H77I4kCKesDy5HUDEmScia2FyncMTOXASMyg251F5PhFoDA5uqBrDDffiLpbqnrZmNXW+g==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -5073,6 +5077,14 @@ "node": ">=6" } }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.0.tgz", @@ -5312,9 +5324,9 @@ } }, "node_modules/react-remove-scroll-bar": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.5.tgz", - "integrity": "sha512-3cqjOqg6s0XbOjWvmasmqHch+RLxIEk2r/70rzGXuz3iIGQsQheEQyqYCBb5EECoD01Vo2SIbDqW4paLeLTASw==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", + "integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", "dependencies": { "react-style-singleton": "^2.2.1", "tslib": "^2.0.0" @@ -5850,16 +5862,16 @@ } }, "node_modules/swagger-client": { - "version": "3.26.0", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.26.0.tgz", - "integrity": "sha512-1yFR/S2V3v5DwgmNePoHEjq2dZJxDx1leDQ53r5M4hZs+dozm9VnznlSl9a1V5iTYw4UsS4PQuBRQsmBH21ViA==", + "version": "3.26.6", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.26.6.tgz", + "integrity": "sha512-PYYca8BsamZaOjlKr5ombOTeDje1ddiYAKGstpmAU3iU+mBKgqHiw5G2J77SC9+chKU5y0aJzmQX4hNu3p2y5A==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", - "@swagger-api/apidom-core": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-error": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-json-pointer": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-ns-openapi-3-1": ">=0.97.0 <1.0.0", - "@swagger-api/apidom-reference": ">=0.97.0 <1.0.0", + "@swagger-api/apidom-core": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-error": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-json-pointer": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-ns-openapi-3-1": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-reference": ">=0.99.0 <1.0.0", "cookie": "~0.6.0", "deepmerge": "~4.3.0", "fast-json-patch": "^3.0.0-1", @@ -5880,17 +5892,17 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.12.0", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.12.0.tgz", - "integrity": 
"sha512-9QHLaGuo1x8jyGjGQMPOtcDFYH4lFb7L+FIYtplldUzo/JFgwfgUnpnUWadnhkVRJEGsHJqoy7IWJ4dK7WaPRQ==", + "version": "5.14.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.14.0.tgz", + "integrity": "sha512-6RUCv7ii6jcdMu9LpNuq5gGboxlS2HfNeeDPdWML0Wt3lwOc7yroOzux08a8FtwFQB5ridprYobcZXVQvLcyjQ==", "dependencies": { - "@babel/runtime-corejs3": "^7.24.0", - "@braintree/sanitize-url": "=7.0.0", + "@babel/runtime-corejs3": "^7.24.4", + "@braintree/sanitize-url": "=7.0.1", "base64-js": "^1.5.1", "classnames": "^2.5.1", "css.escape": "1.5.1", "deep-extend": "0.6.0", - "dompurify": "=3.0.9", + "dompurify": "=3.0.11", "ieee754": "^1.2.1", "immutable": "^3.x.x", "js-file-download": "^0.4.12", @@ -5913,7 +5925,7 @@ "reselect": "^5.1.0", "serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.26.0", + "swagger-client": "^3.26.5", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -6090,9 +6102,9 @@ } }, "node_modules/types-ramda": { - "version": "0.29.9", - "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.9.tgz", - "integrity": "sha512-B+VbLtW68J4ncG/rccKaYDhlirKlVH/Izh2JZUfaPJv+3Tl2jbbgYsB1pvole1vXKSgaPlAe/wgEdOnMdAu52A==", + "version": "0.29.10", + "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", + "integrity": "sha512-5PJiW/eiTPyXXBYGZOYGezMl6qj7keBiZheRwfjJZY26QPHsNrjfJnz0mru6oeqqoTHOni893Jfd6zyUXfQRWg==", "dependencies": { "ts-toolbelt": "^9.6.0" } @@ -6111,9 +6123,9 @@ } }, "node_modules/uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" }, "node_modules/universalify": { "version": "2.0.1", @@ -6147,9 +6159,9 @@ } }, "node_modules/use-callback-ref": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.1.tgz", - "integrity": "sha512-Lg4Vx1XZQauB42Hw3kK7JM6yjVjgFmFC5/Ab797s79aARomD2nEErc4mCgM8EZrARLmmbWpi5DGCadmK50DcAQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", + "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", "dependencies": { "tslib": "^2.0.0" }, diff --git a/playground/package.json b/playground/package.json index d2cc478f8a..67a0cd10a8 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,22 +10,22 @@ "preview": "vite preview" }, "dependencies": { - "graphiql": "^3.1.1", + "graphiql": "^3.2.0", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.12.0" + "swagger-ui-react": "^5.14.0" }, "devDependencies": { "@types/react": "^18.2.74", - "@types/react-dom": "^18.2.23", + "@types/react-dom": "^18.2.24", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.5.0", "@typescript-eslint/parser": "^7.3.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.5", + "eslint-plugin-react-refresh": "^0.4.6", "typescript": "^5.4.3", "vite": "^5.2.8" } From 8b55ebc7444d0f5c0083fb86a31b2ad871eb0551 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:00:42 -0400 Subject: [PATCH 24/49] bot: Bump swagger-ui-react from 5.14.0 to 5.15.0 in /playground (#2514) Bumps [swagger-ui-react](https://github.com/swagger-api/swagger-ui) from 5.14.0 to 5.15.0.
Release notes

Sourced from swagger-ui-react's releases.

Swagger UI v5.15.0 Released!

5.15.0 (2024-04-10)

Bug Fixes

  • json-schema-2020-12-samples: apply string constraints sensibly (#9796) (b6b0d28), closes #9739
  • oas31: allow override names of top level schemas (#9787) (111e420), closes #9713
  • oas3: compensate for JSON Schemas left unresolved by swagger-client (#9794) (3bea389), closes #9790

Features

  • json-schema-2020-12-sample: introduce option API (#9795) (7db9c98)

Commits
  • 13aa3bf chore(release): cut the v5.15.0 release
  • 3bea389 fix(oas3): compensate for JSON Schemas left unresolved by swagger-client (#9794)
  • b6b0d28 fix(json-schema-2020-12-samples): apply string constraints sensibly (#9796)
  • 7db9c98 feat(json-schema-2020-12-sample): introduce option API (#9795)
  • 1267c04 chore(deps-dev): bump eslint-plugin-jest from 27.9.0 to 28.2.0 (#9793)
  • 6e91056 chore(deps): bump dompurify from 3.0.11 to 3.1.0 (#9789)
  • 111e420 fix(oas31): allow override names of top level schemas (#9787)
  • af538a3 chore(deps): bump dependabot/fetch-metadata from 1.6.0 to 2.0.0 (#9729)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=swagger-ui-react&package-manager=npm_and_yarn&previous-version=5.14.0&new-version=5.15.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
  • `@dependabot rebase` will rebase this PR
  • `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
  • `@dependabot merge` will merge this PR after your CI passes on it
  • `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
  • `@dependabot cancel merge` will cancel a previously requested merge and block automerging
  • `@dependabot reopen` will reopen this PR if it is closed
  • `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
  • `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
  • `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 16 ++++++++-------- playground/package.json | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 054f60868e..5f0e79309e 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,7 +12,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.14.0" + "swagger-ui-react": "^5.15.0" }, "devDependencies": { "@types/react": "^18.2.74", @@ -3258,9 +3258,9 @@ } }, "node_modules/dompurify": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.11.tgz", - "integrity": "sha512-Fan4uMuyB26gFV3ovPoEoQbxRRPfTu3CvImyZnhGq5fsIEO+gEFLp45ISFt+kQBWsK5ulDdT0oV28jS1UrwQLg==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz", + "integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA==" }, "node_modules/drange": { "version": "1.1.1", @@ -5892,9 +5892,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.14.0", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.14.0.tgz", - "integrity": "sha512-6RUCv7ii6jcdMu9LpNuq5gGboxlS2HfNeeDPdWML0Wt3lwOc7yroOzux08a8FtwFQB5ridprYobcZXVQvLcyjQ==", + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.15.0.tgz", + "integrity": "sha512-ptagY3F/4Mo1kW1i7m8RJuHOH6zNOmU3LBEqMnTeaYQ9cPUTCpGjLTxWG0IIXasOIpx4kMJqwYhQcFOuw6LsZg==", "dependencies": { "@babel/runtime-corejs3": "^7.24.4", "@braintree/sanitize-url": "=7.0.1", @@ -5902,7 +5902,7 @@ "classnames": "^2.5.1", "css.escape": "1.5.1", "deep-extend": "0.6.0", - "dompurify": "=3.0.11", + "dompurify": "=3.1.0", "ieee754": "^1.2.1", "immutable": "^3.x.x", "js-file-download": "^0.4.12", diff --git a/playground/package.json b/playground/package.json index 67a0cd10a8..e5bbd5dcd7 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,7 +14,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.14.0" + "swagger-ui-react": "^5.15.0" }, "devDependencies": { "@types/react": "^18.2.74", From dd7f56baee760f3e5a9895c012f2d5fbbf4106ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:36:04 -0400 Subject: [PATCH 25/49] bot: Bump typescript from 5.4.3 to 5.4.5 in /playground (#2515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typescript](https://github.com/Microsoft/TypeScript) from 5.4.3 to 5.4.5.
Release notes

Sourced from typescript's releases.

TypeScript 5.4.5

For release notes, check out the release announcement.

For the complete list of fixed issues, check out the

Downloads are available on:

TypeScript 5.4.4

For release notes, check out the release announcement.

For the complete list of fixed issues, check out the

Downloads are available on:

Commits
  • 27bcd4c Update LKG
  • 9f33bf1 🤖 Pick PR #58098 (Fix constraints of nested homomorph...) into release-5.4 (#...
  • 71b2f84 Bump version to 5.4.5 and LKG
  • 892936f 🤖 Pick PR #58083 (Don't propagate partial union/inter...) into release-5.4 (#...
  • 38a7c05 release-5.4: Always set node-version for setup-node (#58117)
  • b754fc3 🤖 Pick PR #57778 (fix type import check for default-i...) into release-5.4 (#...
  • 8eb3367 Bump version to 5.4.4 and LKG
  • de9096b 🤖 Pick PR #57871 (Divide-and-conquer strategy for int...) into release-5.4 (#...
  • 06aae98 🤖 Pick PR #57973 (Compare package.json paths with cor...) into release-5.4 (#...
  • 6d8134e 🤖 Pick PR #57637 (Fixed a regression related to deter...) into release-5.4 (#...
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typescript&package-manager=npm_and_yarn&previous-version=5.4.3&new-version=5.4.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
  • `@dependabot rebase` will rebase this PR
  • `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
  • `@dependabot merge` will merge this PR after your CI passes on it
  • `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
  • `@dependabot cancel merge` will cancel a previously requested merge and block automerging
  • `@dependabot reopen` will reopen this PR if it is closed
  • `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
  • `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
  • `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 5f0e79309e..e0fad98390 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -24,7 +24,7 @@ "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.6", - "typescript": "^5.4.3", + "typescript": "^5.4.5", "vite": "^5.2.8" } }, @@ -6110,9 +6110,9 @@ } }, "node_modules/typescript": { - "version": "5.4.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", - "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", "dev": true, "bin": { "tsc": "bin/tsc", diff --git a/playground/package.json b/playground/package.json index e5bbd5dcd7..a0332b211b 100644 --- a/playground/package.json +++ b/playground/package.json @@ -26,7 +26,7 @@ "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.6", - "typescript": "^5.4.3", + "typescript": "^5.4.5", "vite": "^5.2.8" } } From febcbe05452a43d356812911f27e3eaca94e69e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:54:18 -0400 Subject: [PATCH 26/49] bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.24.0 to 1.25.0 (#2499) Bumps [go.opentelemetry.io/otel/sdk/metric](https://github.com/open-telemetry/opentelemetry-go) from 1.24.0 to 1.25.0.
Changelog

Sourced from go.opentelemetry.io/otel/sdk/metric's changelog.

[1.25.0/0.47.0/0.0.8/0.1.0-alpha] 2024-04-05

Added

  • Add WithProxy option in go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp. (#4906)
  • Add WithProxy option in go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp. (#4906)
  • Add AddLink method to the Span interface in go.opentelemetry.io/otel/trace. (#5032)
  • The Enabled method is added to the Logger interface in go.opentelemetry.io/otel/log. This method is used to notify users whether a log record will be emitted. (#5071)
  • Add SeverityUndefined const to go.opentelemetry.io/otel/log. This value represents an unset severity level. (#5072)
  • Add Empty function in go.opentelemetry.io/otel/log to return a KeyValue for an empty value. (#5076)
  • Add go.opentelemetry.io/otel/log/global to manage the global LoggerProvider. This package is provided in anticipation that all functionality will be migrated to go.opentelemetry.io/otel when go.opentelemetry.io/otel/log stabilizes; at that point, users will be required to migrate their code, and this package will be deprecated and then removed. (#5085)
  • Add support for Summary metrics in the go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp and go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc exporters. (#5100)
  • Add otel.scope.name and otel.scope.version tags to spans exported by go.opentelemetry.io/otel/exporters/zipkin. (#5108)
  • Add support for AddLink to go.opentelemetry.io/otel/bridge/opencensus. (#5116)
  • Add String method to Value and KeyValue in go.opentelemetry.io/otel/log. (#5117)
  • Add Exemplar support to go.opentelemetry.io/otel/exporters/prometheus. (#5111)
  • Add metric semantic conventions to go.opentelemetry.io/otel/semconv/v1.24.0. Future semconv packages will include metric semantic conventions as well. (#4528)

Changed

  • SpanFromContext and SpanContextFromContext in go.opentelemetry.io/otel/trace no longer make a heap allocation when the passed context has no span. (#5049)
  • go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc and go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc now create a gRPC client in idle mode and with "dns" as the default resolver, using grpc.NewClient. (#5151) Because of that, WithDialOption ignores grpc.WithBlock, grpc.WithTimeout, and grpc.WithReturnConnectionError. Note that grpc.DialContext, which was used before, is now deprecated. A sketch of the resulting exporter setup follows.
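
A minimal sketch of exporter construction after this change, assuming otlpmetricgrpc and sdk/metric at v1.25.0; the endpoint and option choices are illustrative, not taken from this changelog:

```go
package main

import (
	"context"

	"go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc"
	"go.opentelemetry.io/otel/sdk/metric"
)

func main() {
	ctx := context.Background()

	// Per the changelog, the exporter's gRPC client is now created with
	// grpc.NewClient and starts in idle mode: the connection is established
	// on first export, not eagerly here, so construction succeeds even when
	// the collector is unreachable. grpc.WithBlock passed via WithDialOption
	// is now ignored.
	exp, err := otlpmetricgrpc.New(ctx,
		otlpmetricgrpc.WithEndpoint("localhost:4317"), // hypothetical collector address
		otlpmetricgrpc.WithInsecure(),
	)
	if err != nil {
		panic(err)
	}

	// Wire the exporter into a periodic reader and meter provider.
	provider := metric.NewMeterProvider(metric.WithReader(metric.NewPeriodicReader(exp)))
	defer func() { _ = provider.Shutdown(ctx) }()
}
```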

Fixed

  • Clarify the documentation about equivalence guarantees for the Set and Distinct types in go.opentelemetry.io/otel/attribute. (#5027)
  • Prevent default ErrorHandler self-delegation. (#5137)
  • Update all dependencies to address [GO-2024-2687]. (#5139)

Removed

  • Drop support for [Go 1.20]. (#4967)

Deprecated

  • Deprecate go.opentelemetry.io/otel/attribute.Sortable type. (#4734)
  • Deprecate go.opentelemetry.io/otel/attribute.NewSetWithSortable function. (#4734)
  • Deprecate go.opentelemetry.io/otel/attribute.NewSetWithSortableFiltered function. (#4734)
Commits
  • 6628407 Release v1.25.0/v0.47.0/v0.0.8/v0.1.0-alpha (#5154)
  • 82b49b4 Add otlploghttp package documentation (#5150)
  • 5bd5582 dependabot updates Thu Apr 4 20:43:44 UTC 2024 (#5155)
  • 6394b02 semconv: Add metric generation (#4880)
  • 35c9570 Prevent default ErrorHandler self-delegation (#5137)
  • 98fd439 Bump google.golang.org/grpc from 1.62.1 to 1.63.0 and use grpc.NewClient...
  • 0168437 Add exemplar support to the prometheus exporter (#5111)
  • e6e4e4a build(deps): bump benchmark-action/github-action-benchmark (#5142)
  • 73ee3bb build(deps): bump codecov/codecov-action from 4.1.1 to 4.2.0 (#5140)
  • 14e3f6b Add otlploghttp exporter skeleton (#5138)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=go.opentelemetry.io/otel/sdk/metric&package-manager=go_modules&previous-version=1.24.0&new-version=1.25.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
  • `@dependabot rebase` will rebase this PR
  • `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
  • `@dependabot merge` will merge this PR after your CI passes on it
  • `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
  • `@dependabot cancel merge` will cancel a previously requested merge and block automerging
  • `@dependabot reopen` will reopen this PR if it is closed
  • `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
  • `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
  • `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 6 +++--- go.sum | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index e22cbe9f3c..9ef7ab83e4 100644 --- a/go.mod +++ b/go.mod @@ -45,7 +45,7 @@ require ( github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 go.opentelemetry.io/otel/metric v1.25.0 - go.opentelemetry.io/otel/sdk/metric v1.24.0 + go.opentelemetry.io/otel/sdk/metric v1.25.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 google.golang.org/grpc v1.62.1 @@ -283,7 +283,7 @@ require ( go.etcd.io/bbolt v1.3.8 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/otel v1.25.0 // indirect - go.opentelemetry.io/otel/sdk v1.24.0 // indirect + go.opentelemetry.io/otel/sdk v1.25.0 // indirect go.opentelemetry.io/otel/trace v1.25.0 // indirect go.uber.org/dig v1.17.1 // indirect go.uber.org/fx v1.20.1 // indirect @@ -293,7 +293,7 @@ require ( golang.org/x/mod v0.15.0 // indirect golang.org/x/net v0.21.0 // indirect golang.org/x/sync v0.6.0 // indirect - golang.org/x/sys v0.17.0 // indirect + golang.org/x/sys v0.18.0 // indirect golang.org/x/term v0.17.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.18.0 // indirect diff --git a/go.sum b/go.sum index c4e7132f38..f5fd826cbd 100644 --- a/go.sum +++ b/go.sum @@ -1187,10 +1187,10 @@ go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k= go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg= go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA= go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s= -go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= -go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= -go.opentelemetry.io/otel/sdk/metric v1.24.0 h1:yyMQrPzF+k88/DbH7o4FMAs80puqd+9osbiBrJrz/w8= -go.opentelemetry.io/otel/sdk/metric v1.24.0/go.mod h1:I6Y5FjH6rvEnTTAYQz3Mmv2kl6Ek5IIrmwTLqMrrOE0= +go.opentelemetry.io/otel/sdk v1.25.0 h1:PDryEJPC8YJZQSyLY5eqLeafHtG+X7FWnf3aXMtxbqo= +go.opentelemetry.io/otel/sdk v1.25.0/go.mod h1:oFgzCM2zdsxKzz6zwpTZYLLQsFwc+K0daArPdIhuxkw= +go.opentelemetry.io/otel/sdk/metric v1.25.0 h1:7CiHOy08LbrxMAp4vWpbiPcklunUshVpAvGBrdDRlGw= +go.opentelemetry.io/otel/sdk/metric v1.25.0/go.mod h1:LzwoKptdbBBdYfvtGCzGwk6GWMA3aUzBOwtQpR6Nz7o= go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM= go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -1392,8 +1392,9 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod 
h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= From ef228b84d0894abf4c18502dea9c19548fbedef3 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 12 Apr 2024 10:07:21 -0700 Subject: [PATCH 27/49] refactor: DB transactions context (#2513) ## Relevant issue(s) Resolves #2512 Resolves #2516 ## Description This PR moves the db transactions to the context. Notable Changes: - moved `txn_db.go` to `store.go` - ~replaced `explicitTxnDB` and `implicitTxnDb` with `store`~ - ~added `db/session.go` to simplify setting context~ - added `db/context.go` to manage context values ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? `make test` Specify the platform(s) on which this was tested: - MacOS --- cli/client.go | 2 +- cli/collection.go | 7 +- cli/index_create.go | 4 - cli/index_drop.go | 5 - cli/index_list.go | 5 - cli/schema_migration_down.go | 9 +- cli/schema_migration_reload.go | 9 +- cli/schema_migration_up.go | 9 +- cli/utils.go | 44 +- client/collection.go | 6 - client/db.go | 3 - client/lens.go | 8 - db/backup.go | 9 +- db/collection.go | 96 +--- db/collection_delete.go | 21 +- db/collection_get.go | 6 +- db/collection_index.go | 38 +- db/collection_update.go | 24 +- db/context.go | 68 +++ db/context_test.go | 57 +++ db/db.go | 13 +- db/db_test.go | 2 +- db/index_test.go | 23 +- db/indexed_docs_test.go | 15 +- db/request.go | 2 +- db/store.go | 275 ++++++++++++ db/subscriptions.go | 4 +- db/txn_db.go | 422 ------------------ http/client.go | 5 - http/client_collection.go | 8 - http/client_lens.go | 6 - http/handler.go | 2 - http/handler_ccip.go | 2 +- http/handler_collection.go | 2 +- http/handler_lens.go | 16 +- http/handler_store.go | 24 +- http/http_client.go | 16 +- http/middleware.go | 61 +-- net/peer_collection.go | 11 +- net/peer_replicator.go | 17 +- net/server.go | 25 +- planner/create.go | 2 +- planner/delete.go | 2 +- planner/update.go | 2 +- tests/bench/query/planner/utils.go | 10 +- tests/clients/cli/wrapper.go | 9 +- tests/clients/cli/wrapper_cli.go | 21 +- tests/clients/cli/wrapper_collection.go | 10 +- tests/clients/cli/wrapper_lens.go | 5 - tests/clients/http/wrapper.go | 4 - .../events/simple/with_create_txn_test.go | 5 +- tests/integration/lens.go | 6 +- tests/integration/utils2.go | 36 +- 53 files changed, 630 insertions(+), 863 deletions(-) create mode 100644 db/context.go create mode 100644 db/context_test.go create mode 100644 db/store.go delete mode 100644 db/txn_db.go diff --git a/cli/client.go b/cli/client.go index 532712e8f8..475f83a80a 100644 --- a/cli/client.go +++ b/cli/client.go @@ -31,7 +31,7 @@ Execute queries, add schema types, obtain node info, etc.`, if err := setContextTransaction(cmd, txID); err != nil { return err } - return setContextStore(cmd) + return setContextDB(cmd) }, } cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") diff 
--git a/cli/collection.go b/cli/collection.go index 23ef9194ae..5b682e5366 100644 --- a/cli/collection.go +++ b/cli/collection.go @@ -17,7 +17,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) func MakeCollectionCommand() *cobra.Command { @@ -41,7 +40,7 @@ func MakeCollectionCommand() *cobra.Command { if err := setContextTransaction(cmd, txID); err != nil { return err } - if err := setContextStore(cmd); err != nil { + if err := setContextDB(cmd); err != nil { return err } store := mustGetContextStore(cmd) @@ -71,10 +70,6 @@ func MakeCollectionCommand() *cobra.Command { } col := cols[0] - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } - ctx := context.WithValue(cmd.Context(), colContextKey, col) cmd.SetContext(ctx) return nil diff --git a/cli/index_create.go b/cli/index_create.go index bfe5ec64c2..0d724da15b 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -14,7 +14,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) func MakeIndexCreateCommand() *cobra.Command { @@ -52,9 +51,6 @@ Example: create a named index for 'Users' collection on 'name' field: if err != nil { return err } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } desc, err = col.CreateIndex(cmd.Context(), desc) if err != nil { return err diff --git a/cli/index_drop.go b/cli/index_drop.go index 96f007268d..5dd069b5da 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeIndexDropCommand() *cobra.Command { @@ -34,9 +32,6 @@ Example: drop the index 'UsersByName' for 'Users' collection: if err != nil { return err } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } return col.DropIndex(cmd.Context(), nameArg) }, } diff --git a/cli/index_list.go b/cli/index_list.go index bf1fd21251..481acb7d37 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeIndexListCommand() *cobra.Command { @@ -38,9 +36,6 @@ Example: show all index for 'Users' collection: if err != nil { return err } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } indexes, err := col.GetIndexes(cmd.Context()) if err != nil { return err diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 1d7622257c..a49f359694 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -17,8 +17,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationDownCommand() *cobra.Command { @@ -67,12 +65,7 @@ Example: migrate from stdin if err := json.Unmarshal(srcData, &src); err != nil { return err } - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - - out, err := lens.MigrateDown(cmd.Context(), enumerable.New(src), collectionID) + out, err := store.LensRegistry().MigrateDown(cmd.Context(), enumerable.New(src), collectionID) if err != nil { return err } diff --git a/cli/schema_migration_reload.go b/cli/schema_migration_reload.go index 4266b3ec3f..8ffb5542f1 
100644 --- a/cli/schema_migration_reload.go +++ b/cli/schema_migration_reload.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationReloadCommand() *cobra.Command { @@ -23,12 +21,7 @@ func MakeSchemaMigrationReloadCommand() *cobra.Command { Long: `Reload the schema migrations within DefraDB`, RunE: func(cmd *cobra.Command, args []string) error { store := mustGetContextStore(cmd) - - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - return lens.ReloadLenses(cmd.Context()) + return store.LensRegistry().ReloadLenses(cmd.Context()) }, } return cmd diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index 577b87d4c7..4473c45911 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -17,8 +17,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/datastore" ) func MakeSchemaMigrationUpCommand() *cobra.Command { @@ -67,12 +65,7 @@ Example: migrate from stdin if err := json.Unmarshal(srcData, &src); err != nil { return err } - lens := store.LensRegistry() - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - lens = lens.WithTxn(tx) - } - - out, err := lens.MigrateUp(cmd.Context(), enumerable.New(src), collectionID) + out, err := store.LensRegistry().MigrateUp(cmd.Context(), enumerable.New(src), collectionID) if err != nil { return err } diff --git a/cli/utils.go b/cli/utils.go index f923021fcf..1df10a3409 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -21,7 +21,7 @@ import ( "github.com/spf13/viper" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/http" ) @@ -32,17 +32,8 @@ var ( cfgContextKey = contextKey("cfg") // rootDirContextKey is the context key for the root directory. rootDirContextKey = contextKey("rootDir") - // txContextKey is the context key for the datastore.Txn - // - // This will only be set if a transaction id is specified. - txContextKey = contextKey("tx") // dbContextKey is the context key for the client.DB dbContextKey = contextKey("db") - // storeContextKey is the context key for the client.Store - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - storeContextKey = contextKey("store") // colContextKey is the context key for the client.Collection // // If a transaction exists, all operations will be executed @@ -61,7 +52,7 @@ func mustGetContextDB(cmd *cobra.Command) client.DB { // // If a store is not set in the current context this function panics. func mustGetContextStore(cmd *cobra.Command) client.Store { - return cmd.Context().Value(storeContextKey).(client.Store) + return cmd.Context().Value(dbContextKey).(client.Store) } // mustGetContextP2P returns the p2p implementation for the current command context. @@ -92,6 +83,18 @@ func tryGetContextCollection(cmd *cobra.Command) (client.Collection, bool) { return col, ok } +// setContextDB sets the db for the current command context. 
+func setContextDB(cmd *cobra.Command) error { + cfg := mustGetContextConfig(cmd) + db, err := http.NewClient(cfg.GetString("api.address")) + if err != nil { + return err + } + ctx := context.WithValue(cmd.Context(), dbContextKey, db) + cmd.SetContext(ctx) + return nil +} + // setContextConfig sets teh config for the current command context. func setContextConfig(cmd *cobra.Command) error { rootdir := mustGetContextRootDir(cmd) @@ -115,24 +118,7 @@ func setContextTransaction(cmd *cobra.Command, txId uint64) error { if err != nil { return err } - ctx := context.WithValue(cmd.Context(), txContextKey, tx) - cmd.SetContext(ctx) - return nil -} - -// setContextStore sets the store for the current command context. -func setContextStore(cmd *cobra.Command) error { - cfg := mustGetContextConfig(cmd) - db, err := http.NewClient(cfg.GetString("api.address")) - if err != nil { - return err - } - ctx := context.WithValue(cmd.Context(), dbContextKey, db) - if tx, ok := ctx.Value(txContextKey).(datastore.Txn); ok { - ctx = context.WithValue(ctx, storeContextKey, db.WithTxn(tx)) - } else { - ctx = context.WithValue(ctx, storeContextKey, db) - } + ctx := db.SetContextTxn(cmd.Context(), tx) cmd.SetContext(ctx) return nil } diff --git a/client/collection.go b/client/collection.go index aa219b3a74..bab61607a9 100644 --- a/client/collection.go +++ b/client/collection.go @@ -14,8 +14,6 @@ import ( "context" "github.com/sourcenetwork/immutable" - - "github.com/sourcenetwork/defradb/datastore" ) // Collection represents a defradb collection. @@ -192,10 +190,6 @@ type Collection interface { showDeleted bool, ) (*Document, error) - // WithTxn returns a new instance of the collection, with a transaction - // handle instead of a raw DB handle. - WithTxn(datastore.Txn) Collection - // GetAllDocIDs returns all the document IDs that exist in the collection. GetAllDocIDs(ctx context.Context, identity immutable.Option[string]) (<-chan DocIDResult, error) diff --git a/client/db.go b/client/db.go index a5d855f137..cedd63d492 100644 --- a/client/db.go +++ b/client/db.go @@ -42,9 +42,6 @@ type DB interface { // can safely operate on it concurrently. NewConcurrentTxn(context.Context, bool) (datastore.Txn, error) - // WithTxn returns a new [client.Store] that respects the given transaction. - WithTxn(datastore.Txn) Store - // Root returns the underlying root store, within which all data managed by DefraDB is held. Root() datastore.RootStore diff --git a/client/lens.go b/client/lens.go index 1a6b423991..3f5befc604 100644 --- a/client/lens.go +++ b/client/lens.go @@ -15,8 +15,6 @@ import ( "github.com/lens-vm/lens/host-go/config/model" "github.com/sourcenetwork/immutable/enumerable" - - "github.com/sourcenetwork/defradb/datastore" ) // LensConfig represents the configuration of a Lens migration in Defra. @@ -43,12 +41,6 @@ type LensConfig struct { // LensRegistry exposes several useful thread-safe migration related functions which may // be used to manage migrations. type LensRegistry interface { - // WithTxn returns a new LensRegistry scoped to the given transaction. - // - // WARNING: Currently this does not provide snapshot isolation, if other transactions are committed - // after this has been created, the results of those commits will be visible within this scope. - WithTxn(datastore.Txn) LensRegistry - // SetMigration caches the migration for the given collection ID. It does not persist the migration in long // term storage, for that one should call [Store.SetMigration(ctx, cfg)]. 
// diff --git a/db/backup.go b/db/backup.go index 2d3b824be1..17110bec05 100644 --- a/db/backup.go +++ b/db/backup.go @@ -92,7 +92,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin } // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup - err = col.WithTxn(txn).Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, acpIdentity.NoIdentity, doc) if err != nil { return NewErrDocCreate(err) } @@ -104,7 +104,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin return NewErrDocUpdate(err) } // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup - err = col.WithTxn(txn).Update(ctx, acpIdentity.NoIdentity, doc) + err = col.Update(ctx, acpIdentity.NoIdentity, doc) if err != nil { return NewErrDocUpdate(err) } @@ -191,9 +191,8 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - colTxn := col.WithTxn(txn) // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export - docIDsCh, err := colTxn.GetAllDocIDs(ctx, acpIdentity.NoIdentity) + docIDsCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) if err != nil { return err } @@ -210,7 +209,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client } } // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export - doc, err := colTxn.Get(ctx, acpIdentity.NoIdentity, docResultWithID.ID, false) + doc, err := col.Get(ctx, acpIdentity.NoIdentity, docResultWithID.ID, false) if err != nil { return err } diff --git a/db/collection.go b/db/collection.go index d7364df3b2..1afa1c775a 100644 --- a/db/collection.go +++ b/db/collection.go @@ -46,18 +46,8 @@ var _ client.Collection = (*collection)(nil) // collection stores data records at Documents, which are gathered // together under a collection name. This is analogous to SQL Tables. type collection struct { - db *db - - // txn represents any externally provided [datastore.Txn] for which any - // operation on this [collection] instance should be scoped to. - // - // If this has no value, operations requiring a transaction should use an - // implicit internally managed transaction, which only lives for duration - // of the operation in question. - txn immutable.Option[datastore.Txn] - - def client.CollectionDefinition - + db *db + def client.CollectionDefinition indexes []CollectionIndex fetcherFactory func() fetcher.Fetcher } @@ -1240,11 +1230,10 @@ func (c *collection) GetAllDocIDs( ctx context.Context, identity immutable.Option[string], ) (<-chan client.DocIDResult, error) { - txn, err := c.getTxn(ctx, true) + ctx, txn, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - return c.getAllDocIDsChan(ctx, identity, txn) } @@ -1271,7 +1260,7 @@ func (c *collection) getAllDocIDsChan( log.ErrorContextE(ctx, errFailedtoCloseQueryReqAllIDs, err) } close(resCh) - c.discardImplicitTxn(ctx, txn) + txn.Discard(ctx) }() for res := range q.Next() { // check for Done on context first @@ -1351,18 +1340,6 @@ func (c *collection) Definition() client.CollectionDefinition { return c.def } -// WithTxn returns a new instance of the collection, with a transaction -// handle instead of a raw DB handle. 
-func (c *collection) WithTxn(txn datastore.Txn) client.Collection { - return &collection{ - db: c.db, - txn: immutable.Some(txn), - def: c.def, - indexes: c.indexes, - fetcherFactory: c.fetcherFactory, - } -} - // Create a new document. // Will verify the DocID/CID to ensure that the new document is correctly formatted. func (c *collection) Create( @@ -1370,18 +1347,18 @@ func (c *collection) Create( identity immutable.Option[string], doc *client.Document, ) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.create(ctx, identity, txn, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } // CreateMany creates a collection of documents at once. @@ -1391,11 +1368,11 @@ func (c *collection) CreateMany( identity immutable.Option[string], docs []*client.Document, ) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) for _, doc := range docs { err = c.create(ctx, identity, txn, doc) @@ -1403,7 +1380,7 @@ func (c *collection) CreateMany( return err } } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) getDocIDAndPrimaryKeyFromDoc( @@ -1476,11 +1453,11 @@ func (c *collection) Update( identity immutable.Option[string], doc *client.Document, ) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) @@ -1499,7 +1476,7 @@ func (c *collection) Update( return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } // Contract: DB Exists check is already performed, and a doc with the given ID exists. @@ -1541,11 +1518,11 @@ func (c *collection) Save( identity immutable.Option[string], doc *client.Document, ) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) // Check if document already exists with primary DS key. primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) @@ -1567,7 +1544,7 @@ func (c *collection) Save( return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } // save saves the document state. save MUST not be called outside the `c.create` @@ -1823,11 +1800,11 @@ func (c *collection) Delete( identity immutable.Option[string], docID client.DocID, ) (bool, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return false, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) @@ -1835,7 +1812,7 @@ func (c *collection) Delete( if err != nil { return false, err } - return true, c.commitImplicitTxn(ctx, txn) + return true, txn.Commit(ctx) } // Exists checks if a given document exists with supplied DocID. 
@@ -1844,18 +1821,18 @@ func (c *collection) Exists( identity immutable.Option[string], docID client.DocID, ) (bool, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return false, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err } - return exists && !isDeleted, c.commitImplicitTxn(ctx, txn) + return exists && !isDeleted, txn.Commit(ctx) } // check if a document exists with the given primary key @@ -1916,35 +1893,6 @@ func (c *collection) saveCompositeToMerkleCRDT( return merkleCRDT.Save(ctx, links) } -// getTxn gets or creates a new transaction from the underlying db. -// If the collection already has a txn, return the existing one. -// Otherwise, create a new implicit transaction. -func (c *collection) getTxn(ctx context.Context, readonly bool) (datastore.Txn, error) { - if c.txn.HasValue() { - return c.txn.Value(), nil - } - return c.db.NewTxn(ctx, readonly) -} - -// discardImplicitTxn is a proxy function used by the collection to execute the Discard() -// transaction function only if its an implicit transaction. -// -// Implicit transactions are transactions that are created *during* an operation execution as a side effect. -// -// Explicit transactions are provided to the collection object via the "WithTxn(...)" function. -func (c *collection) discardImplicitTxn(ctx context.Context, txn datastore.Txn) { - if !c.txn.HasValue() { - txn.Discard(ctx) - } -} - -func (c *collection) commitImplicitTxn(ctx context.Context, txn datastore.Txn) error { - if !c.txn.HasValue() { - return txn.Commit(ctx) - } - return nil -} - func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) core.PrimaryDataStoreKey { return core.PrimaryDataStoreKey{ CollectionRootID: c.Description().RootID, diff --git a/db/collection_delete.go b/db/collection_delete.go index 984cd27a21..8d5bf3f2bb 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -54,12 +54,11 @@ func (c *collection) DeleteWithDocID( identity immutable.Option[string], docID client.DocID, ) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) dsKey := c.getPrimaryKeyFromDocID(docID) res, err := c.deleteWithKey(ctx, identity, txn, dsKey) @@ -67,7 +66,7 @@ func (c *collection) DeleteWithDocID( return nil, err } - return res, c.commitImplicitTxn(ctx, txn) + return res, txn.Commit(ctx) } // DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. @@ -76,19 +75,18 @@ func (c *collection) DeleteWithDocIDs( identity immutable.Option[string], docIDs []client.DocID, ) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) res, err := c.deleteWithIDs(ctx, identity, txn, docIDs, client.Deleted) if err != nil { return nil, err } - return res, c.commitImplicitTxn(ctx, txn) + return res, txn.Commit(ctx) } // DeleteWithFilter deletes using a filter to target documents for delete. 
@@ -97,19 +95,18 @@ func (c *collection) DeleteWithFilter( identity immutable.Option[string], filter any, ) (*client.DeleteResult, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) res, err := c.deleteWithFilter(ctx, identity, txn, filter, client.Deleted) if err != nil { return nil, err } - return res, c.commitImplicitTxn(ctx, txn) + return res, txn.Commit(ctx) } func (c *collection) deleteWithKey( diff --git a/db/collection_get.go b/db/collection_get.go index 16d5bd4711..8ae0dcae75 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -29,11 +29,11 @@ func (c *collection) Get( showDeleted bool, ) (*client.Document, error) { // create txn - txn, err := c.getTxn(ctx, true) + ctx, txn, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) found, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) @@ -53,7 +53,7 @@ func (c *collection) Get( return nil, client.ErrDocumentNotFoundOrNotAuthorized } - return doc, c.commitImplicitTxn(ctx, txn) + return doc, txn.Commit(ctx) } func (c *collection) get( diff --git a/db/collection_index.go b/db/collection_index.go index 1a7af8cc25..3e33c94709 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -41,7 +41,7 @@ func (db *db) createCollectionIndex( if err != nil { return client.IndexDescription{}, NewErrCanNotReadCollection(collectionName, err) } - col = col.WithTxn(txn) + ctx = SetContextTxn(ctx, txn) return col.CreateIndex(ctx, desc) } @@ -54,7 +54,7 @@ func (db *db) dropCollectionIndex( if err != nil { return NewErrCanNotReadCollection(collectionName, err) } - col = col.WithTxn(txn) + ctx = SetContextTxn(ctx, txn) return col.DropIndex(ctx, indexName) } @@ -112,26 +112,26 @@ func (db *db) fetchCollectionIndexDescriptions( } func (c *collection) CreateDocIndex(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.indexNewDoc(ctx, txn, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) UpdateDocIndex(ctx context.Context, oldDoc, newDoc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.deleteIndexedDoc(ctx, txn, oldDoc) if err != nil { @@ -142,22 +142,22 @@ func (c *collection) UpdateDocIndex(ctx context.Context, oldDoc, newDoc *client. 
return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) DeleteDocIndex(ctx context.Context, doc *client.Document) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.deleteIndexedDoc(ctx, txn, doc) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) indexNewDoc(ctx context.Context, txn datastore.Txn, doc *client.Document) error { @@ -242,17 +242,17 @@ func (c *collection) CreateIndex( ctx context.Context, desc client.IndexDescription, ) (client.IndexDescription, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return client.IndexDescription{}, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) index, err := c.createIndex(ctx, txn, desc) if err != nil { return client.IndexDescription{}, err } - return index.Description(), c.commitImplicitTxn(ctx, txn) + return index.Description(), txn.Commit(ctx) } func (c *collection) createIndex( @@ -398,17 +398,17 @@ func (c *collection) indexExistingDocs( // // All index artifacts for existing documents related the index will be removed. func (c *collection) DropIndex(ctx context.Context, indexName string) error { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.dropIndex(ctx, txn, indexName) if err != nil { return err } - return c.commitImplicitTxn(ctx, txn) + return txn.Commit(ctx) } func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName string) error { @@ -486,11 +486,11 @@ func (c *collection) loadIndexes(ctx context.Context, txn datastore.Txn) error { // GetIndexes returns all indexes for the collection. func (c *collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) err = c.loadIndexes(ctx, txn) if err != nil { diff --git a/db/collection_update.go b/db/collection_update.go index dcc3ba6cba..e9ab2e7fa1 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -57,16 +57,17 @@ func (c *collection) UpdateWithFilter( filter any, updater string, ) (*client.UpdateResult, error) { - txn, err := c.getTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, c.db, false) if err != nil { return nil, err } - defer c.discardImplicitTxn(ctx, txn) + defer txn.Discard(ctx) + res, err := c.updateWithFilter(ctx, identity, txn, filter, updater) if err != nil { return nil, err } - return res, c.commitImplicitTxn(ctx, txn) + return res, txn.Commit(ctx) } // UpdateWithDocID updates using a DocID to target a single document for update. 
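 //
 // The updater argument is a JSON string; a hedged, illustrative call (the
 // exact accepted patch forms are defined by the client package, not here):
 //
 //	res, err := col.UpdateWithDocID(ctx, identity, docID, `{"name": "Alice"}`)
 //	// on success, res reports which documents were updated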
@@ -78,17 +79,18 @@ func (c *collection) UpdateWithDocID(
 	docID client.DocID,
 	updater string,
 ) (*client.UpdateResult, error) {
-	txn, err := c.getTxn(ctx, false)
+	ctx, txn, err := ensureContextTxn(ctx, c.db, false)
 	if err != nil {
 		return nil, err
 	}
-	defer c.discardImplicitTxn(ctx, txn)
+	defer txn.Discard(ctx)
+
 	res, err := c.updateWithDocID(ctx, identity, txn, docID, updater)
 	if err != nil {
 		return nil, err
 	}
 
-	return res, c.commitImplicitTxn(ctx, txn)
+	return res, txn.Commit(ctx)
 }
 
 // UpdateWithDocIDs is the same as UpdateWithDocID but accepts multiple DocIDs as a slice.
@@ -100,17 +102,18 @@ func (c *collection) UpdateWithDocIDs(
 	docIDs []client.DocID,
 	updater string,
 ) (*client.UpdateResult, error) {
-	txn, err := c.getTxn(ctx, false)
+	ctx, txn, err := ensureContextTxn(ctx, c.db, false)
 	if err != nil {
 		return nil, err
 	}
-	defer c.discardImplicitTxn(ctx, txn)
+	defer txn.Discard(ctx)
+
 	res, err := c.updateWithIDs(ctx, identity, txn, docIDs, updater)
 	if err != nil {
 		return nil, err
 	}
 
-	return res, c.commitImplicitTxn(ctx, txn)
+	return res, txn.Commit(ctx)
 }
 
 func (c *collection) updateWithDocID(
@@ -333,7 +336,6 @@ func (c *collection) patchPrimaryDoc(
 	if err != nil {
 		return err
 	}
-	primaryCol = primaryCol.WithTxn(txn)
 	primarySchema := primaryCol.Schema()
 
 	primaryField, ok := primaryCol.Description().GetFieldByRelation(
@@ -439,7 +441,7 @@ func (c *collection) makeSelectionPlan(
 		ctx,
 		identity,
 		c.db.acp,
-		c.db.WithTxn(txn),
+		c.db,
 		txn,
 	)
 
diff --git a/db/context.go b/db/context.go
new file mode 100644
index 0000000000..d39472ea5a
--- /dev/null
+++ b/db/context.go
@@ -0,0 +1,68 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package db
+
+import (
+	"context"
+
+	"github.com/sourcenetwork/defradb/datastore"
+)
+
+// txnContextKey is the key type for transaction context values.
+type txnContextKey struct{}
+
+// explicitTxn is a transaction that is managed outside of a db operation.
+type explicitTxn struct {
+	datastore.Txn
+}
+
+func (t *explicitTxn) Commit(ctx context.Context) error {
+	return nil // do nothing
+}
+
+func (t *explicitTxn) Discard(ctx context.Context) {
+	// do nothing
+}
+
+// transactionDB is a db that can create transactions.
+type transactionDB interface {
+	NewTxn(context.Context, bool) (datastore.Txn, error)
+}
+
+// ensureContextTxn ensures that the returned context has a transaction.
+//
+// If a transaction exists on the context it will be made explicit,
+// otherwise a new implicit transaction will be created.
+func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (context.Context, datastore.Txn, error) {
+	txn, ok := TryGetContextTxn(ctx)
+	if ok {
+		return SetContextTxn(ctx, &explicitTxn{txn}), &explicitTxn{txn}, nil
+	}
+	txn, err := db.NewTxn(ctx, readOnly)
+	if err != nil {
+		return nil, txn, err
+	}
+	return SetContextTxn(ctx, txn), txn, nil
+}
+
+// TryGetContextTxn returns a transaction and a bool indicating if the
+// txn was retrieved from the given context.
+func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) {
+	txn, ok := ctx.Value(txnContextKey{}).(datastore.Txn)
+	return txn, ok
+}
+
+// SetContextTxn returns a new context with the txn value set.
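+//
+// A minimal usage sketch (hypothetical caller that owns the transaction):
+//
+//	txn, err := db.NewTxn(ctx, false)
+//	if err != nil {
+//		return err
+//	}
+//	defer txn.Discard(ctx)
+//	ctx = SetContextTxn(ctx, txn)
+//	// operations invoked with ctx now run inside txn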
+// +// This will overwrite any previously set transaction value. +func SetContextTxn(ctx context.Context, txn datastore.Txn) context.Context { + return context.WithValue(ctx, txnContextKey{}, txn) +} diff --git a/db/context_test.go b/db/context_test.go new file mode 100644 index 0000000000..c8b1a322e5 --- /dev/null +++ b/db/context_test.go @@ -0,0 +1,57 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnsureContextTxnExplicit(t *testing.T) { + ctx := context.Background() + + db, err := newMemoryDB(ctx) + require.NoError(t, err) + + txn, err := db.NewTxn(ctx, true) + require.NoError(t, err) + + // set an explicit transaction + ctx = SetContextTxn(ctx, txn) + + ctx, txn, err = ensureContextTxn(ctx, db, true) + require.NoError(t, err) + + _, ok := txn.(*explicitTxn) + assert.True(t, ok) + + _, ok = ctx.Value(txnContextKey{}).(*explicitTxn) + assert.True(t, ok) +} + +func TestEnsureContextTxnImplicit(t *testing.T) { + ctx := context.Background() + + db, err := newMemoryDB(ctx) + require.NoError(t, err) + + ctx, txn, err := ensureContextTxn(ctx, db, true) + require.NoError(t, err) + + _, ok := txn.(*explicitTxn) + assert.False(t, ok) + + _, ok = ctx.Value(txnContextKey{}).(*explicitTxn) + assert.False(t, ok) +} diff --git a/db/db.go b/db/db.go index 239b26f9a7..e7a6fa8d09 100644 --- a/db/db.go +++ b/db/db.go @@ -89,7 +89,7 @@ func newDB( ctx context.Context, rootstore datastore.RootStore, options ...Option, -) (*implicitTxnDB, error) { +) (*db, error) { multistore := datastore.MultiStoreFrom(rootstore) parser, err := graphql.NewParser() @@ -119,7 +119,7 @@ func newDB( return nil, err } - return &implicitTxnDB{db}, nil + return db, nil } // NewTxn creates a new transaction. @@ -134,15 +134,6 @@ func (db *db) NewConcurrentTxn(ctx context.Context, readonly bool) (datastore.Tx return datastore.NewConcurrentTxnFrom(ctx, db.rootstore, txnId, readonly) } -// WithTxn returns a new [client.Store] that respects the given transaction. -func (db *db) WithTxn(txn datastore.Txn) client.Store { - return &explicitTxnDB{ - db: db, - txn: txn, - lensRegistry: db.lensRegistry.WithTxn(txn), - } -} - // Root returns the root datastore. 
func (db *db) Root() datastore.RootStore { return db.rootstore diff --git a/db/db_test.go b/db/db_test.go index 237a1f21ed..118adb285b 100644 --- a/db/db_test.go +++ b/db/db_test.go @@ -19,7 +19,7 @@ import ( badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" ) -func newMemoryDB(ctx context.Context) (*implicitTxnDB, error) { +func newMemoryDB(ctx context.Context) (*db, error) { opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} rootstore, err := badgerds.NewDatastore("", &opts) if err != nil { diff --git a/db/index_test.go b/db/index_test.go index 44c2e45f52..aeda2bdd6d 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -53,7 +53,7 @@ const ( type indexTestFixture struct { ctx context.Context - db *implicitTxnDB + db *db txn datastore.Txn users client.Collection t *testing.T @@ -784,7 +784,8 @@ func TestCollectionGetIndexes_ShouldCloseQueryIterator(t *testing.T) { mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). Return(queryResults, nil) - _, err := f.users.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err := f.users.GetIndexes(ctx) assert.NoError(t, err) } @@ -840,7 +841,8 @@ func TestCollectionGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - _, err := f.users.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err := f.users.GetIndexes(ctx) require.ErrorIs(t, err, testCase.ExpectedError) } } @@ -902,7 +904,8 @@ func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *te mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). Return(mocks.NewQueryResultsWithValues(t, indexDescData), nil) - _, err = collection.WithTxn(mockedTxn).GetIndexes(f.ctx) + ctx := SetContextTxn(f.ctx, mockedTxn) + _, err = collection.GetIndexes(ctx) require.ErrorIs(t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) } @@ -1093,17 +1096,18 @@ func TestDropIndex_IfFailsToDeleteFromStorage_ReturnError(t *testing.T) { mockedTxn.MockDatastore.EXPECT().Query(mock.Anything, mock.Anything).Maybe(). 
Return(mocks.NewQueryResultsWithValues(t), nil) - err := f.users.WithTxn(mockedTxn).DropIndex(f.ctx, testUsersColIndexName) + ctx := SetContextTxn(f.ctx, mockedTxn) + err := f.users.DropIndex(ctx, testUsersColIndexName) require.ErrorIs(t, err, testErr) } func TestDropIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() - col := f.users.WithTxn(f.txn) - _, err := col.CreateIndex(f.ctx, getUsersIndexDescOnName()) + ctx := SetContextTxn(f.ctx, f.txn) + _, err := f.users.CreateIndex(ctx, getUsersIndexDescOnName()) require.NoError(t, err) - indOnAge, err := col.CreateIndex(f.ctx, getUsersIndexDescOnAge()) + indOnAge, err := f.users.CreateIndex(ctx, getUsersIndexDescOnAge()) require.NoError(t, err) f.commitTxn() @@ -1144,7 +1148,8 @@ func TestDropIndex_IfSystemStoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - err := f.users.WithTxn(mockedTxn).DropIndex(f.ctx, testUsersColIndexName) + ctx := SetContextTxn(f.ctx, mockedTxn) + err := f.users.DropIndex(ctx, testUsersColIndexName) require.ErrorIs(t, err, testErr) } diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index c11eb2617f..70604fdc1f 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -322,7 +322,8 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { dataStoreOn.Put(mock.Anything, key.ToDS(), mock.Anything).Return(errors.New("error")) dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) } @@ -360,7 +361,8 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) systemStoreOn.Query(mock.Anything, mock.Anything). Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) - err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) } @@ -378,7 +380,8 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { systemStoreOn.Query(mock.Anything, mock.Anything). 
Return(nil, testErr) - err := f.users.WithTxn(mockTxn).Create(f.ctx, acpIdentity.NoIdentity, doc) + ctx := SetContextTxn(f.ctx, mockTxn) + err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } @@ -806,7 +809,8 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) usersCol.(*collection).fetcherFactory = func() fetcher.Fetcher { return fetcherMocks.NewStubbedFetcher(t) } - err = usersCol.WithTxn(mockedTxn).Update(f.ctx, acpIdentity.NoIdentity, doc) + ctx := SetContextTxn(f.ctx, mockedTxn) + err = usersCol.Update(ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } @@ -1048,7 +1052,8 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Datastore().Unset() mockedTxn.EXPECT().Datastore().Return(mockedTxn.MockDatastore).Maybe() - err = f.users.WithTxn(mockedTxn).Update(f.ctx, acpIdentity.NoIdentity, doc) + ctx := SetContextTxn(f.ctx, mockedTxn) + err = f.users.Update(ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } } diff --git a/db/request.go b/db/request.go index 2905ee4de2..69b300f482 100644 --- a/db/request.go +++ b/db/request.go @@ -59,7 +59,7 @@ func (db *db) execRequest( ctx, identity, db.acp, - db.WithTxn(txn), + db, txn, ) diff --git a/db/store.go b/db/store.go new file mode 100644 index 0000000000..aff11f851d --- /dev/null +++ b/db/store.go @@ -0,0 +1,275 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/lens-vm/lens/host-go/config/model" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" +) + +// ExecRequest executes a request against the database. +func (db *db) ExecRequest( + ctx context.Context, + identity immutable.Option[string], + request string, +) *client.RequestResult { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + res := &client.RequestResult{} + res.GQL.Errors = []error{err} + return res + } + defer txn.Discard(ctx) + + res := db.execRequest(ctx, identity, request, txn) + if len(res.GQL.Errors) > 0 { + return res + } + + if err := txn.Commit(ctx); err != nil { + res.GQL.Errors = []error{err} + return res + } + + return res +} + +// GetCollectionByName returns an existing collection within the database. +func (db *db) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getCollectionByName(ctx, txn, name) +} + +// GetCollections gets all the currently defined collections. +func (db *db) GetCollections( + ctx context.Context, + options client.CollectionFetchOptions, +) ([]client.Collection, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getCollections(ctx, txn, options) +} + +// GetSchemaByVersionID returns the schema description for the schema version of the +// ID provided. +// +// Will return an error if it is not found. 
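+//
+// As with the other read paths in this file, when the context carries no
+// transaction a short-lived read-only one is created and discarded after
+// use; an illustrative call:
+//
+//	schema, err := db.GetSchemaByVersionID(ctx, versionID)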
+func (db *db) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return client.SchemaDescription{}, err + } + defer txn.Discard(ctx) + + return db.getSchemaByVersionID(ctx, txn, versionID) +} + +// GetSchemas returns all schema versions that currently exist within +// this [Store]. +func (db *db) GetSchemas( + ctx context.Context, + options client.SchemaFetchOptions, +) ([]client.SchemaDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getSchemas(ctx, txn, options) +} + +// GetAllIndexes gets all the indexes in the database. +func (db *db) GetAllIndexes( + ctx context.Context, +) (map[client.CollectionName][]client.IndexDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + return db.getAllIndexDescriptions(ctx, txn) +} + +// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, +// creating the necessary collections, request types, etc. +// +// All schema types provided must not exist prior to calling this, and they may not reference existing +// types previously defined. +func (db *db) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + cols, err := db.addSchema(ctx, txn, schemaString) + if err != nil { + return nil, err + } + + if err := txn.Commit(ctx); err != nil { + return nil, err + } + return cols, nil +} + +// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions +// present in the database. +// +// It will also update the GQL types used by the query system. It will error and not apply any of the +// requested, valid updates should the net result of the patch result in an invalid state. The +// individual operations defined in the patch do not need to result in a valid state, only the net result +// of the full patch. +// +// The collections (including the schema version ID) will only be updated if any changes have actually +// been made, if the net result of the patch matches the current persisted description then no changes +// will be applied. 
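+//
+// A hedged example of a patch string (collection name, path, and value are
+// purely illustrative, not a definitive schema patch):
+//
+//	[
+//	  { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "age", "Kind": "Int"} }
+//	]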
+func (db *db) PatchSchema( + ctx context.Context, + patchString string, + migration immutable.Option[model.Lens], + setAsDefaultVersion bool, +) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.patchSchema(ctx, txn, patchString, migration, setAsDefaultVersion) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) PatchCollection( + ctx context.Context, + patchString string, +) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.patchCollection(ctx, txn, patchString) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.setActiveSchemaVersion(ctx, txn, schemaVersionID) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) SetMigration(ctx context.Context, cfg client.LensConfig) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.setMigration(ctx, txn, cfg) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (db *db) AddView( + ctx context.Context, + query string, + sdl string, + transform immutable.Option[model.Lens], +) ([]client.CollectionDefinition, error) { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + defs, err := db.addView(ctx, txn, query, sdl, transform) + if err != nil { + return nil, err + } + + err = txn.Commit(ctx) + if err != nil { + return nil, err + } + + return defs, nil +} + +// BasicImport imports a json dataset. +// filepath must be accessible to the node. +func (db *db) BasicImport(ctx context.Context, filepath string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.basicImport(ctx, txn, filepath) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +// BasicExport exports the current data or subset of data to file in json format. +func (db *db) BasicExport(ctx context.Context, config *client.BackupConfig) error { + ctx, txn, err := ensureContextTxn(ctx, db, true) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.basicExport(ctx, txn, config) + if err != nil { + return err + } + + return txn.Commit(ctx) +} diff --git a/db/subscriptions.go b/db/subscriptions.go index f6f187c54f..e649769c18 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -62,8 +62,8 @@ func (db *db) handleSubscription( continue } + ctx := SetContextTxn(ctx, txn) db.handleEvent(ctx, identity, txn, pub, evt, r) - txn.Discard(ctx) } } @@ -80,7 +80,7 @@ func (db *db) handleEvent( ctx, identity, db.acp, - db.WithTxn(txn), + db, txn, ) diff --git a/db/txn_db.go b/db/txn_db.go deleted file mode 100644 index e77176b433..0000000000 --- a/db/txn_db.go +++ /dev/null @@ -1,422 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package db - -import ( - "context" - - "github.com/lens-vm/lens/host-go/config/model" - - "github.com/sourcenetwork/immutable" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" -) - -var _ client.DB = (*implicitTxnDB)(nil) -var _ client.DB = (*explicitTxnDB)(nil) -var _ client.Store = (*implicitTxnDB)(nil) -var _ client.Store = (*explicitTxnDB)(nil) - -type implicitTxnDB struct { - *db -} - -type explicitTxnDB struct { - *db - txn datastore.Txn - lensRegistry client.LensRegistry -} - -// ExecRequest executes a request against the database. -func (db *implicitTxnDB) ExecRequest( - ctx context.Context, - identity immutable.Option[string], - request string, -) *client.RequestResult { - txn, err := db.NewTxn(ctx, false) - if err != nil { - res := &client.RequestResult{} - res.GQL.Errors = []error{err} - return res - } - defer txn.Discard(ctx) - - res := db.execRequest(ctx, identity, request, txn) - if len(res.GQL.Errors) > 0 { - return res - } - - if err := txn.Commit(ctx); err != nil { - res.GQL.Errors = []error{err} - return res - } - - return res -} - -// ExecRequest executes a transaction request against the database. -func (db *explicitTxnDB) ExecRequest( - ctx context.Context, - identity immutable.Option[string], - request string, -) *client.RequestResult { - return db.execRequest(ctx, identity, request, db.txn) -} - -// GetCollectionByName returns an existing collection within the database. -func (db *implicitTxnDB) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getCollectionByName(ctx, txn, name) -} - -// GetCollectionByName returns an existing collection within the database. -func (db *explicitTxnDB) GetCollectionByName(ctx context.Context, name string) (client.Collection, error) { - col, err := db.getCollectionByName(ctx, db.txn, name) - if err != nil { - return nil, err - } - - return col.WithTxn(db.txn), nil -} - -// GetCollections gets all the currently defined collections. -func (db *implicitTxnDB) GetCollections( - ctx context.Context, - options client.CollectionFetchOptions, -) ([]client.Collection, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getCollections(ctx, txn, options) -} - -// GetCollections gets all the currently defined collections. -func (db *explicitTxnDB) GetCollections( - ctx context.Context, - options client.CollectionFetchOptions, -) ([]client.Collection, error) { - cols, err := db.getCollections(ctx, db.txn, options) - if err != nil { - return nil, err - } - - for i := range cols { - cols[i] = cols[i].WithTxn(db.txn) - } - - return cols, nil -} - -// GetSchemaByVersionID returns the schema description for the schema version of the -// ID provided. -// -// Will return an error if it is not found. -func (db *implicitTxnDB) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return client.SchemaDescription{}, err - } - defer txn.Discard(ctx) - - return db.getSchemaByVersionID(ctx, txn, versionID) -} - -// GetSchemaByVersionID returns the schema description for the schema version of the -// ID provided. -// -// Will return an error if it is not found. 
-func (db *explicitTxnDB) GetSchemaByVersionID(ctx context.Context, versionID string) (client.SchemaDescription, error) { - return db.getSchemaByVersionID(ctx, db.txn, versionID) -} - -// GetSchemas returns all schema versions that currently exist within -// this [Store]. -func (db *implicitTxnDB) GetSchemas( - ctx context.Context, - options client.SchemaFetchOptions, -) ([]client.SchemaDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getSchemas(ctx, txn, options) -} - -// GetSchemas returns all schema versions that currently exist within -// this [Store]. -func (db *explicitTxnDB) GetSchemas( - ctx context.Context, - options client.SchemaFetchOptions, -) ([]client.SchemaDescription, error) { - return db.getSchemas(ctx, db.txn, options) -} - -// GetAllIndexes gets all the indexes in the database. -func (db *implicitTxnDB) GetAllIndexes( - ctx context.Context, -) (map[client.CollectionName][]client.IndexDescription, error) { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - return db.getAllIndexDescriptions(ctx, txn) -} - -// GetAllIndexes gets all the indexes in the database. -func (db *explicitTxnDB) GetAllIndexes( - ctx context.Context, -) (map[client.CollectionName][]client.IndexDescription, error) { - return db.getAllIndexDescriptions(ctx, db.txn) -} - -// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, -// creating the necessary collections, request types, etc. -// -// All schema types provided must not exist prior to calling this, and they may not reference existing -// types previously defined. -func (db *implicitTxnDB) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - cols, err := db.addSchema(ctx, txn, schemaString) - if err != nil { - return nil, err - } - - if err := txn.Commit(ctx); err != nil { - return nil, err - } - return cols, nil -} - -// AddSchema takes the provided GQL schema in SDL format, and applies it to the database, -// creating the necessary collections, request types, etc. -// -// All schema types provided must not exist prior to calling this, and they may not reference existing -// types previously defined. -func (db *explicitTxnDB) AddSchema(ctx context.Context, schemaString string) ([]client.CollectionDescription, error) { - return db.addSchema(ctx, db.txn, schemaString) -} - -// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions -// present in the database. -// -// It will also update the GQL types used by the query system. It will error and not apply any of the -// requested, valid updates should the net result of the patch result in an invalid state. The -// individual operations defined in the patch do not need to result in a valid state, only the net result -// of the full patch. -// -// The collections (including the schema version ID) will only be updated if any changes have actually -// been made, if the net result of the patch matches the current persisted description then no changes -// will be applied. 
-func (db *implicitTxnDB) PatchSchema( - ctx context.Context, - patchString string, - migration immutable.Option[model.Lens], - setAsDefaultVersion bool, -) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.patchSchema(ctx, txn, patchString, migration, setAsDefaultVersion) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// PatchSchema takes the given JSON patch string and applies it to the set of CollectionDescriptions -// present in the database. -// -// It will also update the GQL types used by the query system. It will error and not apply any of the -// requested, valid updates should the net result of the patch result in an invalid state. The -// individual operations defined in the patch do not need to result in a valid state, only the net result -// of the full patch. -// -// The collections (including the schema version ID) will only be updated if any changes have actually -// been made, if the net result of the patch matches the current persisted description then no changes -// will be applied. -func (db *explicitTxnDB) PatchSchema( - ctx context.Context, - patchString string, - migration immutable.Option[model.Lens], - setAsDefaultVersion bool, -) error { - return db.patchSchema(ctx, db.txn, patchString, migration, setAsDefaultVersion) -} - -func (db *implicitTxnDB) PatchCollection( - ctx context.Context, - patchString string, -) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.patchCollection(ctx, txn, patchString) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -func (db *explicitTxnDB) PatchCollection( - ctx context.Context, - patchString string, -) error { - return db.patchCollection(ctx, db.txn, patchString) -} - -func (db *implicitTxnDB) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.setActiveSchemaVersion(ctx, txn, schemaVersionID) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -func (db *explicitTxnDB) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string) error { - return db.setActiveSchemaVersion(ctx, db.txn, schemaVersionID) -} - -func (db *implicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.setMigration(ctx, txn, cfg) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -func (db *explicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig) error { - return db.setMigration(ctx, db.txn, cfg) -} - -func (db *implicitTxnDB) AddView( - ctx context.Context, - query string, - sdl string, - transform immutable.Option[model.Lens], -) ([]client.CollectionDefinition, error) { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - defs, err := db.addView(ctx, txn, query, sdl, transform) - if err != nil { - return nil, err - } - - err = txn.Commit(ctx) - if err != nil { - return nil, err - } - - return defs, nil -} - -func (db *explicitTxnDB) AddView( - ctx context.Context, - query string, - sdl string, - transform immutable.Option[model.Lens], -) ([]client.CollectionDefinition, error) { - return db.addView(ctx, db.txn, query, sdl, transform) -} - -// BasicImport imports a json dataset. -// filepath must be accessible to the node. 
-func (db *implicitTxnDB) BasicImport(ctx context.Context, filepath string) error { - txn, err := db.NewTxn(ctx, false) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.basicImport(ctx, txn, filepath) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// BasicImport imports a json dataset. -// filepath must be accessible to the node. -func (db *explicitTxnDB) BasicImport(ctx context.Context, filepath string) error { - return db.basicImport(ctx, db.txn, filepath) -} - -// BasicExport exports the current data or subset of data to file in json format. -func (db *implicitTxnDB) BasicExport(ctx context.Context, config *client.BackupConfig) error { - txn, err := db.NewTxn(ctx, true) - if err != nil { - return err - } - defer txn.Discard(ctx) - - err = db.basicExport(ctx, txn, config) - if err != nil { - return err - } - - return txn.Commit(ctx) -} - -// BasicExport exports the current data or subset of data to file in json format. -func (db *explicitTxnDB) BasicExport(ctx context.Context, config *client.BackupConfig) error { - return db.basicExport(ctx, db.txn, config) -} - -// LensRegistry returns the LensRegistry in use by this database instance. -// -// It exposes several useful thread-safe migration related functions. -func (db *explicitTxnDB) LensRegistry() client.LensRegistry { - return db.lensRegistry -} diff --git a/http/client.go b/http/client.go index 69c5f2a503..8837ce2e2d 100644 --- a/http/client.go +++ b/http/client.go @@ -86,11 +86,6 @@ func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore return &Transaction{txRes.ID, c.http}, nil } -func (c *Client) WithTxn(tx datastore.Txn) client.Store { - client := c.http.withTxn(tx.ID()) - return &Client{client} -} - func (c *Client) BasicImport(ctx context.Context, filepath string) error { methodURL := c.http.baseURL.JoinPath("backup", "import") diff --git a/http/client_collection.go b/http/client_collection.go index c53bc7e7ff..39ede6aafc 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -25,7 +25,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.Collection = (*Collection)(nil) @@ -445,13 +444,6 @@ func (c *Collection) Get( return doc, nil } -func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { - return &Collection{ - http: c.http.withTxn(tx.ID()), - def: c.def, - } -} - func (c *Collection) GetAllDocIDs( ctx context.Context, identity immutable.Option[string], diff --git a/http/client_lens.go b/http/client_lens.go index 9021aa31d6..34945a41d6 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -21,7 +21,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.LensRegistry = (*LensRegistry)(nil) @@ -31,11 +30,6 @@ type LensRegistry struct { http *httpClient } -func (c *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { - http := c.http.withTxn(tx.ID()) - return &LensRegistry{http} -} - type setMigrationRequest struct { CollectionID uint32 Config model.Lens diff --git a/http/handler.go b/http/handler.go index 7cd278593b..e6d83dbdd3 100644 --- a/http/handler.go +++ b/http/handler.go @@ -54,7 +54,6 @@ func NewApiRouter() (*Router, error) { }) router.AddRouteGroup(func(r *Router) { - r.AddMiddleware(LensMiddleware) lens_handler.bindRoutes(r) }) @@ -82,7 +81,6 @@ func NewHandler(db client.DB) 
(*Handler, error) { r.Use( ApiMiddleware(db, txs), TransactionMiddleware, - StoreMiddleware, ) r.Handle("/*", router) }) diff --git a/http/handler_ccip.go b/http/handler_ccip.go index 36151c5cc3..dfe8a66083 100644 --- a/http/handler_ccip.go +++ b/http/handler_ccip.go @@ -35,7 +35,7 @@ type CCIPResponse struct { // ExecCCIP handles GraphQL over Cross Chain Interoperability Protocol requests. func (c *ccipHandler) ExecCCIP(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var ccipReq CCIPRequest switch req.Method { diff --git a/http/handler_collection.go b/http/handler_collection.go index 1f41442849..8b7f0cf64c 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -331,7 +331,7 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques } func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) indexesMap, err := store.GetAllIndexes(req.Context()) if err != nil { diff --git a/http/handler_lens.go b/http/handler_lens.go index 532eaacefc..94ef9c2abe 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -22,9 +22,9 @@ import ( type lensHandler struct{} func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) - err := lens.ReloadLenses(req.Context()) + err := store.LensRegistry().ReloadLenses(req.Context()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -33,7 +33,7 @@ func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request setMigrationRequest if err := requestJSON(req, &request); err != nil { @@ -41,7 +41,7 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { return } - err := lens.SetMigration(req.Context(), request.CollectionID, request.Config) + err := store.LensRegistry().SetMigration(req.Context(), request.CollectionID, request.Config) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -50,7 +50,7 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { - lens := req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request migrateRequest if err := requestJSON(req, &request); err != nil { @@ -58,7 +58,7 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { return } - result, err := lens.MigrateUp(req.Context(), enumerable.New(request.Data), request.CollectionID) + result, err := store.LensRegistry().MigrateUp(req.Context(), enumerable.New(request.Data), request.CollectionID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -75,7 +75,7 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { } func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { - lens := 
req.Context().Value(lensContextKey).(client.LensRegistry) + store := req.Context().Value(dbContextKey).(client.Store) var request migrateRequest if err := requestJSON(req, &request); err != nil { @@ -83,7 +83,7 @@ func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { return } - result, err := lens.MigrateDown(req.Context(), enumerable.New(request.Data), request.CollectionID) + result, err := store.LensRegistry().MigrateDown(req.Context(), enumerable.New(request.Data), request.CollectionID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/http/handler_store.go b/http/handler_store.go index 4c57eda34f..c71e108818 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -27,7 +27,7 @@ import ( type storeHandler struct{} func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var config client.BackupConfig if err := requestJSON(req, &config); err != nil { @@ -43,7 +43,7 @@ func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var config client.BackupConfig if err := requestJSON(req, &config); err != nil { @@ -59,7 +59,7 @@ func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) schema, err := io.ReadAll(req.Body) if err != nil { @@ -75,7 +75,7 @@ func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var message patchSchemaRequest err := requestJSON(req, &message) @@ -93,7 +93,7 @@ func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var patch string err := requestJSON(req, &patch) @@ -111,7 +111,7 @@ func (s *storeHandler) PatchCollection(rw http.ResponseWriter, req *http.Request } func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) schemaVersionID, err := io.ReadAll(req.Body) if err != nil { @@ -127,7 +127,7 @@ func (s *storeHandler) SetActiveSchemaVersion(rw http.ResponseWriter, req *http. 
} func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var message addViewRequest err := requestJSON(req, &message) @@ -146,7 +146,7 @@ func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var cfg client.LensConfig if err := requestJSON(req, &cfg); err != nil { @@ -163,7 +163,7 @@ func (s *storeHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) options := client.CollectionFetchOptions{} if req.URL.Query().Has("name") { @@ -199,7 +199,7 @@ func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) } func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) options := client.SchemaFetchOptions{} if req.URL.Query().Has("version_id") { @@ -221,7 +221,7 @@ func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) { } func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) indexes, err := store.GetAllIndexes(req.Context()) if err != nil { @@ -296,7 +296,7 @@ func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { } func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + store := req.Context().Value(dbContextKey).(client.Store) var request GraphQLRequest switch { diff --git a/http/http_client.go b/http/http_client.go index 13abb3c6d0..5bcda30dcd 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -17,12 +17,13 @@ import ( "net/http" "net/url" "strings" + + "github.com/sourcenetwork/defradb/db" ) type httpClient struct { client *http.Client baseURL *url.URL - txValue string } func newHttpClient(rawURL string) (*httpClient, error) { @@ -40,20 +41,13 @@ func newHttpClient(rawURL string) (*httpClient, error) { return &client, nil } -func (c *httpClient) withTxn(value uint64) *httpClient { - return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: fmt.Sprintf("%d", value), - } -} - func (c *httpClient) setDefaultHeaders(req *http.Request) { req.Header.Set("Accept", "application/json") req.Header.Set("Content-Type", "application/json") - if c.txValue != "" { - req.Header.Set(TX_HEADER_NAME, c.txValue) + txn, ok := db.TryGetContextTxn(req.Context()) + if ok { + req.Header.Set(TX_HEADER_NAME, fmt.Sprintf("%d", txn.ID())) } } diff --git a/http/middleware.go b/http/middleware.go index f18ba8bf60..674921fd73 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -23,6 +23,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" ) const TX_HEADER_NAME = "x-defradb-tx" @@ -34,20 +35,6 @@ var ( txsContextKey = contextKey("txs") // dbContextKey is the context key for the client.DB dbContextKey 
= contextKey("db") - // txContextKey is the context key for the datastore.Txn - // - // This will only be set if a transaction id is specified. - txContextKey = contextKey("tx") - // storeContextKey is the context key for the client.Store - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - storeContextKey = contextKey("store") - // lensContextKey is the context key for the client.LensRegistry - // - // If a transaction exists, all operations will be executed - // in the current transaction context. - lensContextKey = contextKey("lens") // colContextKey is the context key for the client.Collection // // If a transaction exists, all operations will be executed @@ -102,42 +89,10 @@ func TransactionMiddleware(next http.Handler) http.Handler { next.ServeHTTP(rw, req) return } - - ctx := context.WithValue(req.Context(), txContextKey, tx) - next.ServeHTTP(rw, req.WithContext(ctx)) - }) -} - -// StoreMiddleware sets the db context for the current request. -func StoreMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - db := req.Context().Value(dbContextKey).(client.DB) - - var store client.Store - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - store = db.WithTxn(tx) - } else { - store = db + ctx := req.Context() + if val, ok := tx.(datastore.Txn); ok { + ctx = db.SetContextTxn(ctx, val) } - - ctx := context.WithValue(req.Context(), storeContextKey, store) - next.ServeHTTP(rw, req.WithContext(ctx)) - }) -} - -// LensMiddleware sets the lens context for the current request. -func LensMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) - - var lens client.LensRegistry - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - lens = store.LensRegistry().WithTxn(tx) - } else { - lens = store.LensRegistry() - } - - ctx := context.WithValue(req.Context(), lensContextKey, lens) next.ServeHTTP(rw, req.WithContext(ctx)) }) } @@ -145,18 +100,14 @@ func LensMiddleware(next http.Handler) http.Handler { // CollectionMiddleware sets the collection context for the current request. 
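 //
 // Any transaction resolved by TransactionMiddleware travels on the request
 // context, so no explicit txn wiring is needed here; the expected handler
 // chain (mirroring NewHandler above) is simply:
 //
 //	r.Use(ApiMiddleware(db, txs), TransactionMiddleware)
 //	r.Handle("/*", router)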
func CollectionMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - store := req.Context().Value(storeContextKey).(client.Store) + db := req.Context().Value(dbContextKey).(client.DB) - col, err := store.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) + col, err := db.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) if err != nil { rw.WriteHeader(http.StatusNotFound) return } - if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) - } - ctx := context.WithValue(req.Context(), colContextKey, col) next.ServeHTTP(rw, req.WithContext(ctx)) }) diff --git a/net/peer_collection.go b/net/peer_collection.go index 4ef1139a1c..d8d27b361d 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -19,6 +19,7 @@ import ( acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/db" ) const marker = byte(0xff) @@ -33,8 +34,9 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - storeCol, err := p.db.WithTxn(txn).GetCollections( - p.ctx, + ctx = db.SetContextTxn(ctx, txn) + storeCol, err := p.db.GetCollections( + ctx, client.CollectionFetchOptions{ SchemaRoot: immutable.Some(col), }, @@ -112,8 +114,9 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - storeCol, err := p.db.WithTxn(txn).GetCollections( - p.ctx, + ctx = db.SetContextTxn(ctx, txn) + storeCol, err := p.db.GetCollections( + ctx, client.CollectionFetchOptions{ SchemaRoot: immutable.Some(col), }, diff --git a/net/peer_replicator.go b/net/peer_replicator.go index 93f6070f0b..1dd3c47cf4 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -21,6 +21,7 @@ import ( acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/db" ) func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { @@ -40,12 +41,15 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { return err } + // set transaction for all operations + ctx = db.SetContextTxn(ctx, txn) + var collections []client.Collection switch { case len(rep.Schemas) > 0: // if specific collections are chosen get them by name for _, name := range rep.Schemas { - col, err := p.db.WithTxn(txn).GetCollectionByName(ctx, name) + col, err := p.db.GetCollectionByName(ctx, name) if err != nil { return NewErrReplicatorCollections(err) } @@ -60,7 +64,7 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { default: // default to all collections (unless a collection contains a policy). 
// TODO-ACP: default to all collections after resolving https://github.com/sourcenetwork/defradb/issues/2366 - allCollections, err := p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) + allCollections, err := p.db.GetCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrReplicatorCollections(err) } @@ -109,7 +113,7 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 - keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx, acpIdentity.NoIdentity) + keysCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) if err != nil { return NewErrReplicatorDocID(err, col.Name().Value(), rep.Info.ID) } @@ -136,12 +140,15 @@ func (p *Peer) DeleteReplicator(ctx context.Context, rep client.Replicator) erro return err } + // set transaction for all operations + ctx = db.SetContextTxn(ctx, txn) + var collections []client.Collection switch { case len(rep.Schemas) > 0: // if specific collections are chosen get them by name for _, name := range rep.Schemas { - col, err := p.db.WithTxn(txn).GetCollectionByName(ctx, name) + col, err := p.db.GetCollectionByName(ctx, name) if err != nil { return NewErrReplicatorCollections(err) } @@ -156,7 +163,7 @@ func (p *Peer) DeleteReplicator(ctx context.Context, rep client.Replicator) erro default: // default to all collections - collections, err = p.db.WithTxn(txn).GetCollections(ctx, client.CollectionFetchOptions{}) + collections, err = p.db.GetCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrReplicatorCollections(err) } diff --git a/net/server.go b/net/server.go index 58a9f16f75..73496559cf 100644 --- a/net/server.go +++ b/net/server.go @@ -33,6 +33,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" pb "github.com/sourcenetwork/defradb/net/pb" ) @@ -250,11 +251,13 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, err } defer txn.Discard(ctx) - store := s.db.WithTxn(txn) + + // use a transaction for all operations + ctx = db.SetContextTxn(ctx, txn) // Currently a schema is the best way we have to link a push log request to a collection, // this will change with https://github.com/sourcenetwork/defradb/issues/1085 - col, err := s.getActiveCollection(ctx, store, string(req.Body.SchemaRoot)) + col, err := s.getActiveCollection(ctx, s.db, string(req.Body.SchemaRoot)) if err != nil { return nil, err } @@ -271,9 +274,9 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return nil, errors.Wrap("failed to decode block to ipld.Node", err) } - var session sync.WaitGroup + var wg sync.WaitGroup bp := newBlockProcessor(s.peer, txn, col, dsKey, getter) - err = bp.processRemoteBlock(ctx, &session, nd, true) + err = bp.processRemoteBlock(ctx, &wg, nd, true) if err != nil { log.ErrorContextE( ctx, @@ -283,10 +286,10 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL corelog.Any("CID", cid), ) } - session.Wait() + wg.Wait() bp.mergeBlocks(ctx) - err = s.syncIndexedDocs(ctx, col.WithTxn(txn), docID) + err = s.syncIndexedDocs(ctx, col, docID) if err != nil { return nil, err } @@ -350,14 +353,12 @@ func (s *server) syncIndexedDocs( 
col client.Collection, docID client.DocID, ) error { - preTxnCol, err := s.db.GetCollectionByName(ctx, col.Name().Value()) - if err != nil { - return err - } + // remove transaction from old context + oldCtx := db.SetContextTxn(ctx, nil) //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 // Resolve while handling acp <> secondary indexes. - oldDoc, err := preTxnCol.Get(ctx, acpIdentity.NoIdentity, docID, false) + oldDoc, err := col.Get(oldCtx, acpIdentity.NoIdentity, docID, false) isNewDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isNewDoc && err != nil { return err @@ -372,7 +373,7 @@ func (s *server) syncIndexedDocs( } if isDeletedDoc { - return preTxnCol.DeleteDocIndex(ctx, oldDoc) + return col.DeleteDocIndex(oldCtx, oldDoc) } else if isNewDoc { return col.CreateDocIndex(ctx, doc) } else { diff --git a/planner/create.go b/planner/create.go index 3333ae999e..bedb1be5d5 100644 --- a/planner/create.go +++ b/planner/create.go @@ -78,7 +78,7 @@ func (n *createNode) Next() (bool, error) { return false, nil } - if err := n.collection.WithTxn(n.p.txn).Create( + if err := n.collection.Create( n.p.ctx, n.p.identity, n.doc, diff --git a/planner/delete.go b/planner/delete.go index 74bb14d202..87cf0994ac 100644 --- a/planner/delete.go +++ b/planner/delete.go @@ -140,7 +140,7 @@ func (p *Planner) DeleteDocs(parsed *mapper.Mutation) (planNode, error) { p: p, filter: parsed.Filter, docIDs: parsed.DocIDs.Value(), - collection: col.WithTxn(p.txn), + collection: col, source: slctNode, docMapper: docMapper{parsed.DocumentMapping}, }, nil diff --git a/planner/update.go b/planner/update.go index b86c616dbb..458094d4e0 100644 --- a/planner/update.go +++ b/planner/update.go @@ -169,7 +169,7 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { if err != nil { return nil, err } - update.collection = col.WithTxn(p.txn) + update.collection = col // create the results Select node resultsNode, err := p.Select(&parsed.Select) diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index 5bb4472840..caba91836d 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -57,11 +57,11 @@ func runMakePlanBench( fixture fixtures.Generator, query string, ) error { - db, _, err := benchutils.SetupDBAndCollections(b, ctx, fixture) + d, _, err := benchutils.SetupDBAndCollections(b, ctx, fixture) if err != nil { return err } - defer db.Close() + defer d.Close() parser, err := buildParser(ctx, fixture) if err != nil { @@ -73,18 +73,18 @@ func runMakePlanBench( if len(errs) > 0 { return errors.Wrap("failed to parse query string", errors.New(fmt.Sprintf("%v", errs))) } - txn, err := db.NewTxn(ctx, false) + txn, err := d.NewTxn(ctx, false) if err != nil { return errors.Wrap("failed to create txn", err) } - b.ResetTimer() + for i := 0; i < b.N; i++ { planner := planner.New( ctx, acpIdentity.NoIdentity, acp.NoACP, - db.WithTxn(txn), + d, txn, ) plan, err := planner.MakePlan(q) diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index d10188d4b2..2ddaf86137 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -406,7 +406,7 @@ func (w *Wrapper) ExecRequest( result := &client.RequestResult{} - stdOut, stdErr, err := w.cmd.executeStream(args) + stdOut, stdErr, err := w.cmd.executeStream(ctx, args) if err != nil { result.GQL.Errors = []error{err} return result @@ -515,13 +515,6 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor return 
&Transaction{tx, w.cmd}, nil } -func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - return &Wrapper{ - node: w.node, - cmd: w.cmd.withTxn(tx), - } -} - func (w *Wrapper) Root() datastore.RootStore { return w.node.Root() } diff --git a/tests/clients/cli/wrapper_cli.go b/tests/clients/cli/wrapper_cli.go index 2a985dcb18..9076605857 100644 --- a/tests/clients/cli/wrapper_cli.go +++ b/tests/clients/cli/wrapper_cli.go @@ -17,12 +17,11 @@ import ( "strings" "github.com/sourcenetwork/defradb/cli" - "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db" ) type cliWrapper struct { address string - txValue string } func newCliWrapper(address string) *cliWrapper { @@ -31,15 +30,8 @@ func newCliWrapper(address string) *cliWrapper { } } -func (w *cliWrapper) withTxn(tx datastore.Txn) *cliWrapper { - return &cliWrapper{ - address: w.address, - txValue: fmt.Sprintf("%d", tx.ID()), - } -} - -func (w *cliWrapper) execute(_ context.Context, args []string) ([]byte, error) { - stdOut, stdErr, err := w.executeStream(args) +func (w *cliWrapper) execute(ctx context.Context, args []string) ([]byte, error) { + stdOut, stdErr, err := w.executeStream(ctx, args) if err != nil { return nil, err } @@ -57,12 +49,13 @@ func (w *cliWrapper) execute(_ context.Context, args []string) ([]byte, error) { return stdOutData, nil } -func (w *cliWrapper) executeStream(args []string) (io.ReadCloser, io.ReadCloser, error) { +func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadCloser, io.ReadCloser, error) { stdOutRead, stdOutWrite := io.Pipe() stdErrRead, stdErrWrite := io.Pipe() - if w.txValue != "" { - args = append(args, "--tx", w.txValue) + tx, ok := db.TryGetContextTxn(ctx) + if ok { + args = append(args, "--tx", fmt.Sprintf("%d", tx.ID())) } args = append(args, "--url", w.address) diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 9bb8fb9938..861606a2d1 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/http" ) @@ -448,13 +447,6 @@ func (c *Collection) Get( return doc, nil } -func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { - return &Collection{ - cmd: c.cmd.withTxn(tx), - def: c.def, - } -} - func (c *Collection) GetAllDocIDs( ctx context.Context, identity immutable.Option[string], @@ -466,7 +458,7 @@ func (c *Collection) GetAllDocIDs( args := []string{"client", "collection", "docIDs"} args = append(args, "--name", c.Description().Name.Value()) - stdOut, _, err := c.cmd.executeStream(args) + stdOut, _, err := c.cmd.executeStream(ctx, args) if err != nil { return nil, err } diff --git a/tests/clients/cli/wrapper_lens.go b/tests/clients/cli/wrapper_lens.go index da6011b9eb..a9f3e20bd1 100644 --- a/tests/clients/cli/wrapper_lens.go +++ b/tests/clients/cli/wrapper_lens.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) var _ client.LensRegistry = (*LensRegistry)(nil) @@ -29,10 +28,6 @@ type LensRegistry struct { cmd *cliWrapper } -func (w *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { - return &LensRegistry{w.cmd.withTxn(tx)} -} - func (w *LensRegistry) SetMigration(ctx 
context.Context, collectionID uint32, config model.Lens) error { args := []string{"client", "schema", "migration", "set-registry"} diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 415212b99c..51fe7ae66b 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -201,10 +201,6 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor return &TxWrapper{server, client}, nil } -func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - return w.client.WithTxn(tx) -} - func (w *Wrapper) Root() datastore.RootStore { return w.node.Root() } diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index 7ff1f838e7..f90fc96a88 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -19,6 +19,7 @@ import ( acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -42,7 +43,9 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { func(ctx context.Context, d client.DB) { txn, err := d.NewTxn(ctx, false) assert.Nil(t, err) - r := d.WithTxn(txn).ExecRequest( + + ctx = db.SetContextTxn(ctx, txn) + r := d.ExecRequest( ctx, acpIdentity.NoIdentity, `mutation { diff --git a/tests/integration/lens.go b/tests/integration/lens.go index 69c49a1cbc..541b708a33 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -14,6 +14,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" ) // ConfigureMigration is a test action which will configure a Lens migration using the @@ -42,9 +43,10 @@ func configureMigration( action ConfigureMigration, ) { for _, node := range getNodes(action.NodeID, s.nodes) { - db := getStore(s, node, action.TransactionID, action.ExpectedError) + txn := getTransaction(s, node, action.TransactionID, action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) - err := db.SetMigration(s.ctx, action.LensConfig) + err := node.SetMigration(ctx, action.LensConfig) expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 18c97e76d1..deb38acde3 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -32,6 +32,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/net" "github.com/sourcenetwork/defradb/request/graphql" @@ -1080,8 +1081,9 @@ func getCollections( action GetCollections, ) { for _, node := range getNodes(action.NodeID, s.nodes) { - db := getStore(s, node, action.TransactionID, "") - results, err := db.GetCollections(s.ctx, action.FilterOptions) + txn := getTransaction(s, node, action.TransactionID, "") + ctx := db.SetContextTxn(s.ctx, txn) + results, err := node.GetCollections(ctx, action.FilterOptions) expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) assertExpectedErrorRaised(s.t, s.testCase.Description, 
action.ExpectedError, expectedErrorRaised) @@ -1249,11 +1251,12 @@ func createDocViaGQL( input, ) - db := getStore(s, node, immutable.None[int](), action.ExpectedError) + txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) identity := acpIdentity.NewIdentity(action.Identity) - result := db.ExecRequest( - s.ctx, + ctx := db.SetContextTxn(s.ctx, txn) + result := node.ExecRequest( + ctx, identity, request, ) @@ -1426,10 +1429,10 @@ func updateDocViaGQL( input, ) - db := getStore(s, node, immutable.None[int](), action.ExpectedError) - - result := db.ExecRequest( - s.ctx, + txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) + result := node.ExecRequest( + ctx, acpIdentity.NewIdentity(action.Identity), request, ) @@ -1591,14 +1594,14 @@ func withRetry( return nil } -func getStore( +func getTransaction( s *state, db client.DB, transactionSpecifier immutable.Option[int], expectedError string, -) client.Store { +) datastore.Txn { if !transactionSpecifier.HasValue() { - return db + return nil } transactionID := transactionSpecifier.Value() @@ -1619,7 +1622,7 @@ func getStore( s.txns[transactionID] = txn } - return db.WithTxn(s.txns[transactionID]) + return s.txns[transactionID] } // commitTransaction commits the given transaction. @@ -1647,9 +1650,10 @@ func executeRequest( ) { var expectedErrorRaised bool for nodeID, node := range getNodes(action.NodeID, s.nodes) { - db := getStore(s, node, action.TransactionID, action.ExpectedError) - result := db.ExecRequest( - s.ctx, + txn := getTransaction(s, node, action.TransactionID, action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) + result := node.ExecRequest( + ctx, acpIdentity.NewIdentity(action.Identity), action.Request, ) From 7cb065b85e7b8a9d39013d82e518cb8831ed3d53 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 15 Apr 2024 11:13:17 -0700 Subject: [PATCH 28/49] refactor(i): Remove datastore.Txn from private db methods (#2518) ## Relevant issue(s) Resolves #2517 ## Description This PR removes the `datastore.Txn` parameter from private db methods. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
`make test` Specify the platform(s) on which this was tested: - MacOS --- db/backup.go | 13 ++--- db/backup_test.go | 40 +++++++++---- db/collection.go | 125 +++++++++++++++++++--------------------- db/collection_delete.go | 24 +++----- db/collection_get.go | 7 +-- db/collection_index.go | 76 ++++++++++++------------ db/collection_update.go | 24 +++----- db/context.go | 11 ++++ db/db.go | 6 +- db/index_test.go | 24 +++++--- db/indexed_docs_test.go | 8 ++- db/lens.go | 11 ++-- db/request.go | 3 +- db/schema.go | 24 ++++---- db/sequence.go | 21 ++++--- db/store.go | 28 ++++----- db/subscriptions.go | 5 +- db/view.go | 8 +-- 18 files changed, 240 insertions(+), 218 deletions(-) diff --git a/db/backup.go b/db/backup.go index 17110bec05..4c72797b0e 100644 --- a/db/backup.go +++ b/db/backup.go @@ -20,10 +20,9 @@ import ( acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" ) -func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath string) (err error) { +func (db *db) basicImport(ctx context.Context, filepath string) (err error) { f, err := os.Open(filepath) if err != nil { return NewErrOpenFile(err, filepath) @@ -50,7 +49,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin return err } colName := t.(string) - col, err := db.getCollectionByName(ctx, txn, colName) + col, err := db.getCollectionByName(ctx, colName) if err != nil { return NewErrFailedToGetCollection(colName, err) } @@ -119,19 +118,19 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin return nil } -func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client.BackupConfig) (err error) { +func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err error) { // old key -> new Key keyChangeCache := map[string]string{} cols := []client.Collection{} if len(config.Collections) == 0 { - cols, err = db.getCollections(ctx, txn, client.CollectionFetchOptions{}) + cols, err = db.getCollections(ctx, client.CollectionFetchOptions{}) if err != nil { return NewErrFailedToGetAllCollections(err) } } else { for _, colName := range config.Collections { - col, err := db.getCollectionByName(ctx, txn, colName) + col, err := db.getCollectionByName(ctx, colName) if err != nil { return NewErrFailedToGetCollection(colName, err) } @@ -233,7 +232,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName = field.Name + request.RelatedObjectID } } else { - foreignCol, err := db.getCollectionByName(ctx, txn, field.Kind.Underlying()) + foreignCol, err := db.getCollectionByName(ctx, field.Kind.Underlying()) if err != nil { return NewErrFailedToGetCollection(field.Kind.Underlying(), err) } diff --git a/db/backup_test.go b/db/backup_test.go index 6a9eab3cc9..968415f3b3 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -64,10 +64,12 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextTxn(ctx, txn) defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -126,10 +128,12 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { txn, err := 
db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextTxn(ctx, txn) defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Pretty: true}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Pretty: true}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -188,10 +192,12 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextTxn(ctx, txn) defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -262,10 +268,12 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextTxn(ctx, txn) defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -324,6 +332,8 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextTxn(ctx, txn) defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" @@ -335,7 +345,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { ) require.NoError(t, err) - err = db.basicExport(ctx, txn, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) + err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) require.NoError(t, err) b, err := os.ReadFile(filepath) @@ -370,6 +380,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -380,15 +391,16 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.NoError(t, err) err = txn.Commit(ctx) require.NoError(t, err) txn, err = db.NewTxn(ctx, true) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) - col1, err := db.getCollectionByName(ctx, txn, "Address") + col1, err := db.getCollectionByName(ctx, "Address") require.NoError(t, err) key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") @@ -396,7 +408,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { _, err = col1.Get(ctx, acpIdentity.NoIdentity, key1, false) require.NoError(t, err) - col2, err := db.getCollectionByName(ctx, txn, "User") + col2, err := db.getCollectionByName(ctx, "User") require.NoError(t, err) key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") @@ -429,6 +441,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -439,7 +452,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, 
filepath) require.ErrorIs(t, err, ErrExpectedJSONObject) err = txn.Commit(ctx) require.NoError(t, err) @@ -464,6 +477,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -474,7 +488,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.ErrorIs(t, err, ErrExpectedJSONArray) err = txn.Commit(ctx) require.NoError(t, err) @@ -499,6 +513,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -510,7 +525,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { require.NoError(t, err) wrongFilepath := t.TempDir() + "/some/test.json" - err = db.basicImport(ctx, txn, wrongFilepath) + err = db.basicImport(ctx, wrongFilepath) require.ErrorIs(t, err, os.ErrNotExist) err = txn.Commit(ctx) require.NoError(t, err) @@ -535,6 +550,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -545,7 +561,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { ) require.NoError(t, err) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) require.ErrorIs(t, err, ErrFailedToGetCollection) err = txn.Commit(ctx) require.NoError(t, err) diff --git a/db/collection.go b/db/collection.go index 1afa1c775a..faae1bbda7 100644 --- a/db/collection.go +++ b/db/collection.go @@ -31,7 +31,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/description" "github.com/sourcenetwork/defradb/db/fetcher" @@ -85,11 +84,11 @@ func (c *collection) newFetcher() fetcher.Fetcher { // Note: Collection.ID is an auto-incrementing value that is generated by the database. func (db *db) createCollection( ctx context.Context, - txn datastore.Txn, def client.CollectionDefinition, ) (client.Collection, error) { schema := def.Schema desc := def.Description + txn := mustGetContextTxn(ctx) if desc.Name.HasValue() { exists, err := description.HasCollectionByName(ctx, txn, desc.Name.Value()) @@ -101,16 +100,16 @@ func (db *db) createCollection( } } - colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return nil, err } - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return nil, err } - fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(uint32(colID))) + fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(uint32(colID))) if err != nil { return nil, err } @@ -131,7 +130,7 @@ func (db *db) createCollection( // queries too. 
fieldID = 0 } else { - fieldID, err = fieldSeq.next(ctx, txn) + fieldID, err = fieldSeq.next(ctx) if err != nil { return nil, err } @@ -154,12 +153,12 @@ func (db *db) createCollection( col := db.newCollection(desc, schema) for _, index := range desc.Indexes { - if _, err := col.createIndex(ctx, txn, index); err != nil { + if _, err := col.createIndex(ctx, index); err != nil { return nil, err } } - return db.getCollectionByID(ctx, txn, desc.ID) + return db.getCollectionByID(ctx, desc.ID) } // validateCollectionDefinitionPolicyDesc validates that the policy definition is valid, beyond syntax. @@ -203,7 +202,6 @@ func (db *db) validateCollectionDefinitionPolicyDesc( // applied. func (db *db) updateSchema( ctx context.Context, - txn datastore.Txn, existingSchemaByName map[string]client.SchemaDescription, proposedDescriptionsByName map[string]client.SchemaDescription, schema client.SchemaDescription, @@ -244,6 +242,7 @@ func (db *db) updateSchema( } } + txn := mustGetContextTxn(ctx) previousVersionID := schema.VersionID schema, err = description.CreateSchemaVersion(ctx, txn, schema) if err != nil { @@ -259,7 +258,7 @@ func (db *db) updateSchema( return err } - colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return err } @@ -289,7 +288,7 @@ func (db *db) updateSchema( existingCol.RootID = col.RootID } - fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(existingCol.RootID)) + fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(existingCol.RootID)) if err != nil { return err } @@ -302,7 +301,7 @@ func (db *db) updateSchema( if ok { fieldID = existingField.ID } else { - nextFieldID, err := fieldSeq.next(ctx, txn) + nextFieldID, err := fieldSeq.next(ctx) if err != nil { return err } @@ -328,12 +327,12 @@ func (db *db) updateSchema( } if !isExistingCol { - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return err } - fieldSeq, err := db.getSequence(ctx, txn, core.NewFieldIDSequenceKey(col.RootID)) + fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(col.RootID)) if err != nil { return err } @@ -353,7 +352,7 @@ func (db *db) updateSchema( for _, globalField := range schema.Fields { _, exists := col.GetFieldByName(globalField.Name) if !exists { - fieldID, err := fieldSeq.next(ctx, txn) + fieldID, err := fieldSeq.next(ctx) if err != nil { return err } @@ -385,7 +384,7 @@ func (db *db) updateSchema( if setAsActiveVersion { // activate collection versions using the new schema ID. This call must be made after // all new collection versions have been saved. - err = db.setActiveSchemaVersion(ctx, txn, schema.VersionID) + err = db.setActiveSchemaVersion(ctx, schema.VersionID) if err != nil { return err } @@ -549,14 +548,13 @@ func validateUpdateSchemaFields( func (db *db) patchCollection( ctx context.Context, - txn datastore.Txn, patchString string, ) error { patch, err := jsonpatch.DecodePatch([]byte(patchString)) if err != nil { return err } - + txn := mustGetContextTxn(ctx) cols, err := description.GetCollections(ctx, txn) if err != nil { return err @@ -638,7 +636,7 @@ func (db *db) patchCollection( } } - return db.loadSchema(ctx, txn) + return db.loadSchema(ctx) } var patchCollectionValidators = []func( @@ -917,13 +915,12 @@ oldLoop: // It will return an error if the provided schema version ID does not exist. 
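Every private-method hunk in this patch applies the same mechanical recipe: drop the datastore.Txn parameter and assert the transaction out of the context instead. A sketch of the recipe for patchCollection follows; only the lines visible in the hunks above are real, the elided bodies are placeholders:

    // Before: every caller threads the transaction explicitly.
    func (db *db) patchCollection(ctx context.Context, txn datastore.Txn, patchString string) error {
        cols, err := description.GetCollections(ctx, txn)
        // ... apply the decoded JSON patch to cols ...
    }

    // After: the context carries the transaction; private code asserts it is there.
    func (db *db) patchCollection(ctx context.Context, patchString string) error {
        txn := mustGetContextTxn(ctx) // panics if no caller attached one
        cols, err := description.GetCollections(ctx, txn)
        // ... apply the decoded JSON patch to cols ...
    }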
func (db *db) setActiveSchemaVersion( ctx context.Context, - txn datastore.Txn, schemaVersionID string, ) error { if schemaVersionID == "" { return ErrSchemaVersionIDEmpty } - + txn := mustGetContextTxn(ctx) cols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, schemaVersionID) if err != nil { return err @@ -967,11 +964,11 @@ func (db *db) setActiveSchemaVersion( if len(sources) > 0 { // For now, we assume that each collection can only have a single source. This will likely need // to change later. - activeCol, rootCol, isActiveFound = db.getActiveCollectionDown(ctx, txn, colsByID, sources[0].SourceCollectionID) + activeCol, rootCol, isActiveFound = db.getActiveCollectionDown(ctx, colsByID, sources[0].SourceCollectionID) } if !isActiveFound { // We need to look both down and up for the active version - the most recent is not necessarily the active one. - activeCol, isActiveFound = db.getActiveCollectionUp(ctx, txn, colsBySourceID, rootCol.ID) + activeCol, isActiveFound = db.getActiveCollectionUp(ctx, colsBySourceID, rootCol.ID) } var newName string @@ -1000,12 +997,11 @@ func (db *db) setActiveSchemaVersion( } // Load the schema into the clients (e.g. GQL) - return db.loadSchema(ctx, txn) + return db.loadSchema(ctx) } func (db *db) getActiveCollectionDown( ctx context.Context, - txn datastore.Txn, colsByID map[uint32]client.CollectionDescription, id uint32, ) (client.CollectionDescription, client.CollectionDescription, bool) { @@ -1028,12 +1024,11 @@ func (db *db) getActiveCollectionDown( // For now, we assume that each collection can only have a single source. This will likely need // to change later. - return db.getActiveCollectionDown(ctx, txn, colsByID, sources[0].SourceCollectionID) + return db.getActiveCollectionDown(ctx, colsByID, sources[0].SourceCollectionID) } func (db *db) getActiveCollectionUp( ctx context.Context, - txn datastore.Txn, colsBySourceID map[uint32][]client.CollectionDescription, id uint32, ) (client.CollectionDescription, bool) { @@ -1047,7 +1042,7 @@ func (db *db) getActiveCollectionUp( if col.Name.HasValue() { return col, true } - activeCol, isFound := db.getActiveCollectionUp(ctx, txn, colsBySourceID, col.ID) + activeCol, isFound := db.getActiveCollectionUp(ctx, colsBySourceID, col.ID) if isFound { return activeCol, isFound } @@ -1056,7 +1051,9 @@ func (db *db) getActiveCollectionUp( return client.CollectionDescription{}, false } -func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint32) (client.Collection, error) { +func (db *db) getCollectionByID(ctx context.Context, id uint32) (client.Collection, error) { + txn := mustGetContextTxn(ctx) + col, err := description.GetCollectionByID(ctx, txn, id) if err != nil { return nil, err @@ -1069,7 +1066,7 @@ func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint3 collection := db.newCollection(col, schema) - err = collection.loadIndexes(ctx, txn) + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -1078,12 +1075,12 @@ func (db *db) getCollectionByID(ctx context.Context, txn datastore.Txn, id uint3 } // getCollectionByName returns an existing collection within the database. 
-func (db *db) getCollectionByName(ctx context.Context, txn datastore.Txn, name string) (client.Collection, error) { +func (db *db) getCollectionByName(ctx context.Context, name string) (client.Collection, error) { if name == "" { return nil, ErrCollectionNameEmpty } - cols, err := db.getCollections(ctx, txn, client.CollectionFetchOptions{Name: immutable.Some(name)}) + cols, err := db.getCollections(ctx, client.CollectionFetchOptions{Name: immutable.Some(name)}) if err != nil { return nil, err } @@ -1099,11 +1096,11 @@ func (db *db) getCollectionByName(ctx context.Context, txn datastore.Txn, name s // is provided. func (db *db) getCollections( ctx context.Context, - txn datastore.Txn, options client.CollectionFetchOptions, ) ([]client.Collection, error) { - var cols []client.CollectionDescription + txn := mustGetContextTxn(ctx) + var cols []client.CollectionDescription switch { case options.Name.HasValue(): col, err := description.GetCollectionByName(ctx, txn, options.Name.Value()) @@ -1172,7 +1169,7 @@ func (db *db) getCollections( collection := db.newCollection(col, schema) collections = append(collections, collection) - err = collection.loadIndexes(ctx, txn) + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -1182,7 +1179,9 @@ func (db *db) getCollections( } // getAllActiveDefinitions returns all queryable collection/views and any embedded schema used by them. -func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([]client.CollectionDefinition, error) { +func (db *db) getAllActiveDefinitions(ctx context.Context) ([]client.CollectionDefinition, error) { + txn := mustGetContextTxn(ctx) + cols, err := description.GetActiveCollections(ctx, txn) if err != nil { return nil, err @@ -1197,7 +1196,7 @@ func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([ collection := db.newCollection(col, schema) - err = collection.loadIndexes(ctx, txn) + err = collection.loadIndexes(ctx) if err != nil { return nil, err } @@ -1230,18 +1229,18 @@ func (c *collection) GetAllDocIDs( ctx context.Context, identity immutable.Option[string], ) (<-chan client.DocIDResult, error) { - ctx, txn, err := ensureContextTxn(ctx, c.db, true) + ctx, _, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - return c.getAllDocIDsChan(ctx, identity, txn) + return c.getAllDocIDsChan(ctx, identity) } func (c *collection) getAllDocIDsChan( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, ) (<-chan client.DocIDResult, error) { + txn := mustGetContextTxn(ctx) prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix CollectionRootID: c.Description().RootID, } @@ -1353,7 +1352,7 @@ func (c *collection) Create( } defer txn.Discard(ctx) - err = c.create(ctx, identity, txn, doc) + err = c.create(ctx, identity, doc) if err != nil { return err } @@ -1375,7 +1374,7 @@ func (c *collection) CreateMany( defer txn.Discard(ctx) for _, doc := range docs { - err = c.create(ctx, identity, txn, doc) + err = c.create(ctx, identity, doc) if err != nil { return err } @@ -1402,7 +1401,6 @@ func (c *collection) getDocIDAndPrimaryKeyFromDoc( func (c *collection) create( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, doc *client.Document, ) error { docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) @@ -1411,7 +1409,7 @@ func (c *collection) create( } // check if doc already exists - exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + exists, isDeleted, err := 
c.exists(ctx, identity, primaryKey) if err != nil { return err } @@ -1424,6 +1422,7 @@ func (c *collection) create( // write value object marker if we have an empty doc if len(doc.Values()) == 0 { + txn := mustGetContextTxn(ctx) valueKey := c.getDataStoreKeyFromDocID(docID) err = txn.Datastore().Put(ctx, valueKey.ToDS(), []byte{base.ObjectMarker}) if err != nil { @@ -1432,12 +1431,12 @@ func (c *collection) create( } // write data to DB via MerkleClock/CRDT - _, err = c.save(ctx, identity, txn, doc, true) + _, err = c.save(ctx, identity, doc, true) if err != nil { return err } - err = c.indexNewDoc(ctx, txn, doc) + err = c.indexNewDoc(ctx, doc) if err != nil { return err } @@ -1460,7 +1459,7 @@ func (c *collection) Update( defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, primaryKey) if err != nil { return err } @@ -1471,7 +1470,7 @@ func (c *collection) Update( return NewErrDocumentDeleted(primaryKey.DocID) } - err = c.update(ctx, identity, txn, doc) + err = c.update(ctx, identity, doc) if err != nil { return err } @@ -1487,7 +1486,6 @@ func (c *collection) Update( func (c *collection) update( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, doc *client.Document, ) error { // Stop the update if the correct permissions aren't there. @@ -1504,7 +1502,7 @@ func (c *collection) update( return client.ErrDocumentNotFoundOrNotAuthorized } - _, err = c.save(ctx, identity, txn, doc, false) + _, err = c.save(ctx, identity, doc, false) if err != nil { return err } @@ -1526,7 +1524,7 @@ func (c *collection) Save( // Check if document already exists with primary DS key. primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, primaryKey) if err != nil { return err } @@ -1536,9 +1534,9 @@ func (c *collection) Save( } if exists { - err = c.update(ctx, identity, txn, doc) + err = c.update(ctx, identity, doc) } else { - err = c.create(ctx, identity, txn, doc) + err = c.create(ctx, identity, doc) } if err != nil { return err @@ -1553,16 +1551,17 @@ func (c *collection) Save( func (c *collection) save( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, doc *client.Document, isCreate bool, ) (cid.Cid, error) { if !isCreate { - err := c.updateIndexedDoc(ctx, txn, doc) + err := c.updateIndexedDoc(ctx, doc) if err != nil { return cid.Undef, err } } + txn := mustGetContextTxn(ctx) + // NOTE: We delay the final Clean() call until we know // the commit on the transaction is successful. 
If we didn't // wait, and just did it here, then *if* the commit fails down @@ -1608,7 +1607,6 @@ func (c *collection) save( err = c.patchPrimaryDoc( ctx, identity, - txn, c.Name().Value(), relationFieldDescription, primaryKey.DocID, @@ -1626,7 +1624,6 @@ func (c *collection) save( err = c.validateOneToOneLinkDoesntAlreadyExist( ctx, identity, - txn, doc.ID().String(), fieldDescription, val.Value(), @@ -1662,7 +1659,6 @@ func (c *collection) save( headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, - txn, primaryKey.ToDataStoreKey(), links, client.Active, @@ -1697,7 +1693,6 @@ func (c *collection) save( func (c *collection) validateOneToOneLinkDoesntAlreadyExist( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, docID string, fieldDescription client.FieldDefinition, value any, @@ -1720,7 +1715,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( return nil } - otherCol, err := c.db.getCollectionByName(ctx, txn, objFieldDescription.Kind.Underlying()) + otherCol, err := c.db.getCollectionByName(ctx, objFieldDescription.Kind.Underlying()) if err != nil { return err } @@ -1743,7 +1738,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( fieldDescription.Name, value, ) - selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) if err != nil { return err } @@ -1808,7 +1803,7 @@ func (c *collection) Delete( primaryKey := c.getPrimaryKeyFromDocID(docID) - err = c.applyDelete(ctx, identity, txn, primaryKey) + err = c.applyDelete(ctx, identity, primaryKey) if err != nil { return false, err } @@ -1828,7 +1823,7 @@ func (c *collection) Exists( defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - exists, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + exists, isDeleted, err := c.exists(ctx, identity, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err } @@ -1839,7 +1834,6 @@ func (c *collection) Exists( func (c *collection) exists( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { canRead, err := c.checkAccessOfDocWithACP( @@ -1854,6 +1848,7 @@ func (c *collection) exists( return false, false, nil } + txn := mustGetContextTxn(ctx) val, err := txn.Datastore().Get(ctx, primaryKey.ToDS()) if err != nil && errors.Is(err, ds.ErrNotFound) { return false, false, nil @@ -1873,11 +1868,11 @@ func (c *collection) exists( // Calling it elsewhere could cause the omission of acp checks. 
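Public mutators keep their transactional behaviour by pairing ensureContextTxn with the now txn-free private helpers. A simplified sketch of the resulting Create shape, matching the hunk above; the final commit line is assumed from the surrounding pattern and the real method does more on commit:

    func (c *collection) Create(
        ctx context.Context,
        identity immutable.Option[string],
        doc *client.Document,
    ) error {
        // Adopts a caller-attached txn (made explicit) or opens an implicit one;
        // either way the returned ctx carries it for the private helpers.
        ctx, txn, err := ensureContextTxn(ctx, c.db, false)
        if err != nil {
            return err
        }
        defer txn.Discard(ctx)
        if err := c.create(ctx, identity, doc); err != nil {
            return err
        }
        return txn.Commit(ctx)
    }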
func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, - txn datastore.Txn, dsKey core.DataStoreKey, links []core.DAGLink, status client.DocumentStatus, ) (ipld.Node, uint64, error) { + txn := mustGetContextTxn(ctx) dsKey = dsKey.WithFieldId(core.COMPOSITE_NAMESPACE) merkleCRDT := merklecrdt.NewMerkleCompositeDAG( txn, diff --git a/db/collection_delete.go b/db/collection_delete.go index 8d5bf3f2bb..a6d12399ce 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -19,7 +19,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/merkle/clock" ) @@ -61,7 +60,7 @@ func (c *collection) DeleteWithDocID( defer txn.Discard(ctx) dsKey := c.getPrimaryKeyFromDocID(docID) - res, err := c.deleteWithKey(ctx, identity, txn, dsKey) + res, err := c.deleteWithKey(ctx, identity, dsKey) if err != nil { return nil, err } @@ -81,7 +80,7 @@ func (c *collection) DeleteWithDocIDs( } defer txn.Discard(ctx) - res, err := c.deleteWithIDs(ctx, identity, txn, docIDs, client.Deleted) + res, err := c.deleteWithIDs(ctx, identity, docIDs, client.Deleted) if err != nil { return nil, err } @@ -101,7 +100,7 @@ func (c *collection) DeleteWithFilter( } defer txn.Discard(ctx) - res, err := c.deleteWithFilter(ctx, identity, txn, filter, client.Deleted) + res, err := c.deleteWithFilter(ctx, identity, filter, client.Deleted) if err != nil { return nil, err } @@ -112,12 +111,11 @@ func (c *collection) DeleteWithFilter( func (c *collection) deleteWithKey( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, key core.PrimaryDataStoreKey, ) (*client.DeleteResult, error) { // Check the key we have been given to delete with actually has a corresponding // document (i.e. document actually exists in the collection). - err := c.applyDelete(ctx, identity, txn, key) + err := c.applyDelete(ctx, identity, key) if err != nil { return nil, err } @@ -134,7 +132,6 @@ func (c *collection) deleteWithKey( func (c *collection) deleteWithIDs( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, docIDs []client.DocID, _ client.DocumentStatus, ) (*client.DeleteResult, error) { @@ -146,7 +143,7 @@ func (c *collection) deleteWithIDs( primaryKey := c.getPrimaryKeyFromDocID(docID) // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, identity, txn, primaryKey) + err := c.applyDelete(ctx, identity, primaryKey) if err != nil { return nil, err } @@ -164,12 +161,11 @@ func (c *collection) deleteWithIDs( func (c *collection) deleteWithFilter( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, filter any, _ client.DocumentStatus, ) (*client.DeleteResult, error) { // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) if err != nil { return nil, err } @@ -217,7 +213,7 @@ func (c *collection) deleteWithFilter( } // Delete the document that is associated with this DS key we got from the filter. 
- err = c.applyDelete(ctx, identity, txn, primaryKey) + err = c.applyDelete(ctx, identity, primaryKey) if err != nil { return nil, err } @@ -234,11 +230,10 @@ func (c *collection) deleteWithFilter( func (c *collection) applyDelete( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, ) error { // Must also have read permission to delete, inorder to check if document exists. - found, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + found, isDeleted, err := c.exists(ctx, identity, primaryKey) if err != nil { return err } @@ -264,8 +259,8 @@ func (c *collection) applyDelete( return client.ErrDocumentNotFoundOrNotAuthorized } + txn := mustGetContextTxn(ctx) dsKey := primaryKey.ToDataStoreKey() - headset := clock.NewHeadSet( txn.Headstore(), dsKey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), @@ -285,7 +280,6 @@ func (c *collection) applyDelete( headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, - txn, dsKey, dagLinks, client.Deleted, diff --git a/db/collection_get.go b/db/collection_get.go index 8ae0dcae75..968e6ca761 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/db/fetcher" ) @@ -36,7 +35,7 @@ func (c *collection) Get( defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - found, isDeleted, err := c.exists(ctx, identity, txn, primaryKey) + found, isDeleted, err := c.exists(ctx, identity, primaryKey) if err != nil { return nil, err } @@ -44,7 +43,7 @@ func (c *collection) Get( return nil, client.ErrDocumentNotFoundOrNotAuthorized } - doc, err := c.get(ctx, identity, txn, primaryKey, nil, showDeleted) + doc, err := c.get(ctx, identity, primaryKey, nil, showDeleted) if err != nil { return nil, err } @@ -59,11 +58,11 @@ func (c *collection) Get( func (c *collection) get( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, primaryKey core.PrimaryDataStoreKey, fields []client.FieldDefinition, showDeleted bool, ) (*client.Document, error) { + txn := mustGetContextTxn(ctx) // create a new document fetcher df := c.newFetcher() // initialize it with the primary index diff --git a/db/collection_index.go b/db/collection_index.go index 3e33c94709..0c1921dd62 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -33,36 +33,33 @@ import ( // createCollectionIndex creates a new collection index and saves it to the database in its system store. 
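From a caller's perspective the change is invisible unless a transaction is wanted, in which case SetContextTxn scopes any mix of reads and writes to it. A hypothetical caller-side sketch, where `users` is a client.Collection and `doc` an existing client.Document, both invented for the example; it assumes, as ensureContextTxn's doc comment implies, that an adopted explicit transaction is committed only by its creator:

    txn, err := d.NewTxn(ctx, false)
    if err != nil {
        return err
    }
    defer txn.Discard(ctx)

    tctx := db.SetContextTxn(ctx, txn)
    if err := users.Create(tctx, acpIdentity.NoIdentity, doc); err != nil {
        return err
    }
    if _, err := users.Delete(tctx, acpIdentity.NoIdentity, doc.ID()); err != nil {
        return err
    }
    // Nothing becomes visible to other readers until the caller commits.
    return txn.Commit(ctx)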
func (db *db) createCollectionIndex( ctx context.Context, - txn datastore.Txn, collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - col, err := db.getCollectionByName(ctx, txn, collectionName) + col, err := db.getCollectionByName(ctx, collectionName) if err != nil { return client.IndexDescription{}, NewErrCanNotReadCollection(collectionName, err) } - ctx = SetContextTxn(ctx, txn) return col.CreateIndex(ctx, desc) } func (db *db) dropCollectionIndex( ctx context.Context, - txn datastore.Txn, collectionName, indexName string, ) error { - col, err := db.getCollectionByName(ctx, txn, collectionName) + col, err := db.getCollectionByName(ctx, collectionName) if err != nil { return NewErrCanNotReadCollection(collectionName, err) } - ctx = SetContextTxn(ctx, txn) return col.DropIndex(ctx, indexName) } // getAllIndexDescriptions returns all the index descriptions in the database. func (db *db) getAllIndexDescriptions( ctx context.Context, - txn datastore.Txn, ) (map[client.CollectionName][]client.IndexDescription, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.None[uint32](), "") keys, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription](ctx, @@ -96,9 +93,10 @@ func (db *db) getAllIndexDescriptions( func (db *db) fetchCollectionIndexDescriptions( ctx context.Context, - txn datastore.Txn, colID uint32, ) ([]client.IndexDescription, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.Some(colID), "") _, indexDescriptions, err := datastore.DeserializePrefix[client.IndexDescription]( ctx, @@ -118,7 +116,7 @@ func (c *collection) CreateDocIndex(ctx context.Context, doc *client.Document) e } defer txn.Discard(ctx) - err = c.indexNewDoc(ctx, txn, doc) + err = c.indexNewDoc(ctx, doc) if err != nil { return err } @@ -133,11 +131,11 @@ func (c *collection) UpdateDocIndex(ctx context.Context, oldDoc, newDoc *client. 
} defer txn.Discard(ctx) - err = c.deleteIndexedDoc(ctx, txn, oldDoc) + err = c.deleteIndexedDoc(ctx, oldDoc) if err != nil { return err } - err = c.indexNewDoc(ctx, txn, newDoc) + err = c.indexNewDoc(ctx, newDoc) if err != nil { return err } @@ -152,7 +150,7 @@ func (c *collection) DeleteDocIndex(ctx context.Context, doc *client.Document) e } defer txn.Discard(ctx) - err = c.deleteIndexedDoc(ctx, txn, doc) + err = c.deleteIndexedDoc(ctx, doc) if err != nil { return err } @@ -160,11 +158,13 @@ func (c *collection) DeleteDocIndex(ctx context.Context, doc *client.Document) e return txn.Commit(ctx) } -func (c *collection) indexNewDoc(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - err := c.loadIndexes(ctx, txn) +func (c *collection) indexNewDoc(ctx context.Context, doc *client.Document) error { + err := c.loadIndexes(ctx) if err != nil { return err } + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Save(ctx, txn, doc) if err != nil { @@ -176,10 +176,9 @@ func (c *collection) indexNewDoc(ctx context.Context, txn datastore.Txn, doc *cl func (c *collection) updateIndexedDoc( ctx context.Context, - txn datastore.Txn, doc *client.Document, ) error { - err := c.loadIndexes(ctx, txn) + err := c.loadIndexes(ctx) if err != nil { return err } @@ -188,7 +187,6 @@ func (c *collection) updateIndexedDoc( oldDoc, err := c.get( ctx, acpIdentity.NoIdentity, - txn, c.getPrimaryKeyFromDocID(doc.ID()), c.Definition().CollectIndexedFields(), false, @@ -196,6 +194,7 @@ func (c *collection) updateIndexedDoc( if err != nil { return err } + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Update(ctx, txn, oldDoc, doc) if err != nil { @@ -207,13 +206,13 @@ func (c *collection) updateIndexedDoc( func (c *collection) deleteIndexedDoc( ctx context.Context, - txn datastore.Txn, doc *client.Document, ) error { - err := c.loadIndexes(ctx, txn) + err := c.loadIndexes(ctx) if err != nil { return err } + txn := mustGetContextTxn(ctx) for _, index := range c.indexes { err = index.Delete(ctx, txn, doc) if err != nil { @@ -248,7 +247,7 @@ func (c *collection) CreateIndex( } defer txn.Discard(ctx) - index, err := c.createIndex(ctx, txn, desc) + index, err := c.createIndex(ctx, desc) if err != nil { return client.IndexDescription{}, err } @@ -257,7 +256,6 @@ func (c *collection) CreateIndex( func (c *collection) createIndex( ctx context.Context, - txn datastore.Txn, desc client.IndexDescription, ) (CollectionIndex, error) { // Don't allow creating index on a permissioned collection, until following is implemented. 
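One consequence of mustGetContextTxn's bare type assertion, flagged by the recurring "callers of this function must set a context transaction" comments above: breaking the contract fails fast rather than proceeding with a nil transaction. A sketch of the failure mode, using a hypothetical direct call from inside the db package:

    // No txn attached to the context, so the assertion inside
    // mustGetContextTxn fails immediately:
    descriptions, err := db.getAllIndexDescriptions(context.Background())
    // panics: interface conversion: interface {} is nil, not datastore.Txn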
@@ -279,20 +277,19 @@ func (c *collection) createIndex( return nil, err } - indexKey, err := c.generateIndexNameIfNeededAndCreateKey(ctx, txn, &desc) + indexKey, err := c.generateIndexNameIfNeededAndCreateKey(ctx, &desc) if err != nil { return nil, err } colSeq, err := c.db.getSequence( ctx, - txn, core.NewIndexIDSequenceKey(c.ID()), ) if err != nil { return nil, err } - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return nil, err } @@ -303,6 +300,7 @@ func (c *collection) createIndex( return nil, err } + txn := mustGetContextTxn(ctx) err = txn.Systemstore().Put(ctx, indexKey.ToDS(), buf) if err != nil { return nil, err @@ -313,7 +311,7 @@ func (c *collection) createIndex( } c.def.Description.Indexes = append(c.def.Description.Indexes, colIndex.Description()) c.indexes = append(c.indexes, colIndex) - err = c.indexExistingDocs(ctx, txn, colIndex) + err = c.indexExistingDocs(ctx, colIndex) if err != nil { removeErr := colIndex.RemoveAll(ctx, txn) return nil, errors.Join(err, removeErr) @@ -323,10 +321,10 @@ func (c *collection) createIndex( func (c *collection) iterateAllDocs( ctx context.Context, - txn datastore.Txn, fields []client.FieldDefinition, exec func(doc *client.Document) error, ) error { + txn := mustGetContextTxn(ctx) df := c.newFetcher() err := df.Init( ctx, @@ -376,7 +374,6 @@ func (c *collection) iterateAllDocs( func (c *collection) indexExistingDocs( ctx context.Context, - txn datastore.Txn, index CollectionIndex, ) error { fields := make([]client.FieldDefinition, 0, 1) @@ -386,8 +383,8 @@ func (c *collection) indexExistingDocs( fields = append(fields, colField) } } - - return c.iterateAllDocs(ctx, txn, fields, func(doc *client.Document) error { + txn := mustGetContextTxn(ctx) + return c.iterateAllDocs(ctx, fields, func(doc *client.Document) error { return index.Save(ctx, txn, doc) }) } @@ -404,18 +401,19 @@ func (c *collection) DropIndex(ctx context.Context, indexName string) error { } defer txn.Discard(ctx) - err = c.dropIndex(ctx, txn, indexName) + err = c.dropIndex(ctx, indexName) if err != nil { return err } return txn.Commit(ctx) } -func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName string) error { - err := c.loadIndexes(ctx, txn) +func (c *collection) dropIndex(ctx context.Context, indexName string) error { + err := c.loadIndexes(ctx) if err != nil { return err } + txn := mustGetContextTxn(ctx) var didFind bool for i := range c.indexes { @@ -448,7 +446,9 @@ func (c *collection) dropIndex(ctx context.Context, txn datastore.Txn, indexName return nil } -func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) error { +func (c *collection) dropAllIndexes(ctx context.Context) error { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) prefix := core.NewCollectionIndexKey(immutable.Some(c.ID()), "") keys, err := datastore.FetchKeysForPrefix(ctx, prefix.ToString(), txn.Systemstore()) @@ -466,8 +466,8 @@ func (c *collection) dropAllIndexes(ctx context.Context, txn datastore.Txn) erro return err } -func (c *collection) loadIndexes(ctx context.Context, txn datastore.Txn) error { - indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, txn, c.ID()) +func (c *collection) loadIndexes(ctx context.Context) error { + indexDescriptions, err := c.db.fetchCollectionIndexDescriptions(ctx, c.ID()) if err != nil { return err } @@ -492,7 +492,7 @@ func (c *collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, } defer 
txn.Discard(ctx) - err = c.loadIndexes(ctx, txn) + err = c.loadIndexes(ctx) if err != nil { return nil, err } @@ -520,9 +520,11 @@ func (c *collection) checkExistingFields( func (c *collection) generateIndexNameIfNeededAndCreateKey( ctx context.Context, - txn datastore.Txn, desc *client.IndexDescription, ) (core.CollectionIndexKey, error) { + // callers of this function must set a context transaction + txn := mustGetContextTxn(ctx) + var indexKey core.CollectionIndexKey if desc.Name == "" { nameIncrement := 1 diff --git a/db/collection_update.go b/db/collection_update.go index e9ab2e7fa1..9a8e2bc552 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/planner" ) @@ -63,7 +62,7 @@ func (c *collection) UpdateWithFilter( } defer txn.Discard(ctx) - res, err := c.updateWithFilter(ctx, identity, txn, filter, updater) + res, err := c.updateWithFilter(ctx, identity, filter, updater) if err != nil { return nil, err } @@ -85,7 +84,7 @@ func (c *collection) UpdateWithDocID( } defer txn.Discard(ctx) - res, err := c.updateWithDocID(ctx, identity, txn, docID, updater) + res, err := c.updateWithDocID(ctx, identity, docID, updater) if err != nil { return nil, err } @@ -108,7 +107,7 @@ func (c *collection) UpdateWithDocIDs( } defer txn.Discard(ctx) - res, err := c.updateWithIDs(ctx, identity, txn, docIDs, updater) + res, err := c.updateWithIDs(ctx, identity, docIDs, updater) if err != nil { return nil, err } @@ -119,7 +118,6 @@ func (c *collection) UpdateWithDocIDs( func (c *collection) updateWithDocID( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, docID client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -149,7 +147,7 @@ func (c *collection) updateWithDocID( return nil, err } - err = c.update(ctx, identity, txn, doc) + err = c.update(ctx, identity, doc) if err != nil { return nil, err } @@ -164,7 +162,6 @@ func (c *collection) updateWithDocID( func (c *collection) updateWithIDs( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -198,7 +195,7 @@ func (c *collection) updateWithIDs( return nil, err } - err = c.update(ctx, identity, txn, doc) + err = c.update(ctx, identity, doc) if err != nil { return nil, err } @@ -212,7 +209,6 @@ func (c *collection) updateWithIDs( func (c *collection) updateWithFilter( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, filter any, updater string, ) (*client.UpdateResult, error) { @@ -233,7 +229,7 @@ func (c *collection) updateWithFilter( } // Make a selection plan that will scan through only the documents with matching filter. 
- selectionPlan, err := c.makeSelectionPlan(ctx, identity, txn, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) if err != nil { return nil, err } @@ -287,7 +283,7 @@ func (c *collection) updateWithFilter( } } - err = c.update(ctx, identity, txn, doc) + err = c.update(ctx, identity, doc) if err != nil { return nil, err } @@ -321,7 +317,6 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDefinition) (clien func (c *collection) patchPrimaryDoc( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, secondaryCollectionName string, relationFieldDescription client.FieldDefinition, docID string, @@ -332,7 +327,7 @@ func (c *collection) patchPrimaryDoc( return err } - primaryCol, err := c.db.getCollectionByName(ctx, txn, relationFieldDescription.Kind.Underlying()) + primaryCol, err := c.db.getCollectionByName(ctx, relationFieldDescription.Kind.Underlying()) if err != nil { return err } @@ -373,7 +368,6 @@ func (c *collection) patchPrimaryDoc( err = pc.validateOneToOneLinkDoesntAlreadyExist( ctx, identity, - txn, primaryDocID.String(), primaryIDField, docID, @@ -411,7 +405,6 @@ func (c *collection) patchPrimaryDoc( func (c *collection) makeSelectionPlan( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, filter any, ) (planner.RequestPlan, error) { var f immutable.Option[request.Filter] @@ -437,6 +430,7 @@ func (c *collection) makeSelectionPlan( return nil, err } + txn := mustGetContextTxn(ctx) planner := planner.New( ctx, identity, diff --git a/db/context.go b/db/context.go index d39472ea5a..f235475d24 100644 --- a/db/context.go +++ b/db/context.go @@ -41,6 +41,9 @@ type transactionDB interface { // // If a transactions exists on the context it will be made explicit, // otherwise a new implicit transaction will be created. +// +// The returned context will contain the transaction +// along with the copied values from the input context. func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (context.Context, datastore.Txn, error) { txn, ok := TryGetContextTxn(ctx) if ok { @@ -53,6 +56,14 @@ func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (con return SetContextTxn(ctx, txn), txn, nil } +// mustGetContextTxn returns the transaction from the context or panics. +// +// This should only be called from private functions within the db package +// where we ensure an implicit or explicit transaction always exists. +func mustGetContextTxn(ctx context.Context) datastore.Txn { + return ctx.Value(txnContextKey{}).(datastore.Txn) +} + // TryGetContextTxn returns a transaction and a bool indicating if the // txn was retrieved from the given context. 
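db/context.go is the heart of the refactor. Only mustGetContextTxn's body appears in the hunk above, but given it, the exported half is presumably the standard context-value pattern. A sketch with assumed bodies; the names and observable behaviour are confirmed by the diffs, the implementations are not:

    // Sketch, package db; imports "context" and defradb's datastore package.
    type txnContextKey struct{}

    // SetContextTxn returns a copy of ctx carrying txn. Passing nil stores a
    // value no assertion matches, so it acts as a detach, which would explain
    // the SetContextTxn(ctx, nil) call in syncIndexedDocs earlier in this patch.
    func SetContextTxn(ctx context.Context, txn datastore.Txn) context.Context {
        return context.WithValue(ctx, txnContextKey{}, txn)
    }

    // TryGetContextTxn reports whether a transaction is attached to ctx.
    func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) {
        txn, ok := ctx.Value(txnContextKey{}).(datastore.Txn)
        return txn, ok
    }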
func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) { diff --git a/db/db.go b/db/db.go index e7a6fa8d09..327f8e9c9e 100644 --- a/db/db.go +++ b/db/db.go @@ -181,7 +181,7 @@ func (db *db) initialize(ctx context.Context) error { db.glock.Lock() defer db.glock.Unlock() - txn, err := db.NewTxn(ctx, false) + ctx, txn, err := ensureContextTxn(ctx, db, false) if err != nil { return err } @@ -202,7 +202,7 @@ func (db *db) initialize(ctx context.Context) error { // if we're loading an existing database, just load the schema // and migrations and finish initialization if exists { - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return err } @@ -220,7 +220,7 @@ func (db *db) initialize(ctx context.Context) error { // init meta data // collection sequence - _, err = db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + _, err = db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return err } diff --git a/db/index_test.go b/db/index_test.go index aeda2bdd6d..5409b6c20e 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -219,7 +219,8 @@ func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescript } func (f *indexTestFixture) dropIndex(colName, indexName string) error { - return f.db.dropCollectionIndex(f.ctx, f.txn, colName, indexName) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.dropCollectionIndex(ctx, colName, indexName) } func (f *indexTestFixture) countIndexPrefixes(indexName string) int { @@ -255,7 +256,8 @@ func (f *indexTestFixture) createCollectionIndexFor( collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - index, err := f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + ctx := SetContextTxn(f.ctx, f.txn) + index, err := f.db.createCollectionIndex(ctx, collectionName, desc) if err == nil { f.commitTxn() } @@ -263,11 +265,13 @@ func (f *indexTestFixture) createCollectionIndexFor( } func (f *indexTestFixture) getAllIndexes() (map[client.CollectionName][]client.IndexDescription, error) { - return f.db.getAllIndexDescriptions(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.getAllIndexDescriptions(ctx) } func (f *indexTestFixture) getCollectionIndexes(colID uint32) ([]client.IndexDescription, error) { - return f.db.fetchCollectionIndexDescriptions(f.ctx, f.txn, colID) + ctx := SetContextTxn(f.ctx, f.txn) + return f.db.fetchCollectionIndexDescriptions(ctx, colID) } func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) { @@ -1172,7 +1176,8 @@ func TestDropAllIndexes_ShouldDeleteAllIndexes(t *testing.T) { assert.Equal(t, 2, f.countIndexPrefixes("")) - err = f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err = f.users.(*collection).dropAllIndexes(ctx) assert.NoError(t, err) assert.Equal(t, 0, f.countIndexPrefixes("")) @@ -1184,7 +1189,8 @@ func TestDropAllIndexes_IfStorageFails_ReturnError(t *testing.T) { f.createUserCollectionIndexOnName() f.db.Close() - err := f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err := f.users.(*collection).dropAllIndexes(ctx) assert.Error(t, err) } @@ -1240,7 +1246,8 @@ func TestDropAllIndexes_IfSystemStorageFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - err := f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + err := f.users.(*collection).dropAllIndexes(ctx) assert.ErrorIs(t, 
err, testErr, testCase.Name) } } @@ -1261,7 +1268,8 @@ func TestDropAllIndexes_ShouldCloseQueryIterator(t *testing.T) { mockedTxn.EXPECT().Systemstore().Unset() mockedTxn.EXPECT().Systemstore().Return(mockedTxn.MockSystemstore).Maybe() - _ = f.users.(*collection).dropAllIndexes(f.ctx, f.txn) + ctx := SetContextTxn(f.ctx, f.txn) + _ = f.users.(*collection).dropAllIndexes(ctx) } func TestNewCollectionIndex_IfDescriptionHasNoFields_ReturnError(t *testing.T) { diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 70604fdc1f..99a4c9ee56 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -131,7 +131,8 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { return key } - cols, err := b.f.db.getCollections(b.f.ctx, b.f.txn, client.CollectionFetchOptions{}) + ctx := SetContextTxn(b.f.ctx, b.f.txn) + cols, err := b.f.db.getCollections(ctx, client.CollectionFetchOptions{}) require.NoError(b.f.t, err) var collection client.Collection for _, col := range cols { @@ -793,7 +794,8 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) require.NoError(t, err) // retrieve the collection without index cached - usersCol, err := f.db.getCollectionByName(f.ctx, f.txn, usersColName) + ctx := SetContextTxn(f.ctx, f.txn) + usersCol, err := f.db.getCollectionByName(ctx, usersColName) require.NoError(t, err) testErr := errors.New("test error") @@ -809,7 +811,7 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) usersCol.(*collection).fetcherFactory = func() fetcher.Fetcher { return fetcherMocks.NewStubbedFetcher(t) } - ctx := SetContextTxn(f.ctx, mockedTxn) + ctx = SetContextTxn(f.ctx, mockedTxn) err = usersCol.Update(ctx, acpIdentity.NoIdentity, doc) require.ErrorIs(t, err, testErr) } diff --git a/db/lens.go b/db/lens.go index d5240dad83..f21d084f88 100644 --- a/db/lens.go +++ b/db/lens.go @@ -18,12 +18,13 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" "github.com/sourcenetwork/defradb/errors" ) -func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.LensConfig) error { +func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error { + txn := mustGetContextTxn(ctx) + dstCols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, cfg.DestinationSchemaVersionID) if err != nil { return err @@ -34,7 +35,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le return err } - colSeq, err := db.getSequence(ctx, txn, core.CollectionIDSequenceKey{}) + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return err } @@ -42,7 +43,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le if len(sourceCols) == 0 { // If no collections are found with the given [SourceSchemaVersionID], this migration must be from // a collection/schema version that does not yet exist locally. We must now create it. - colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return err } @@ -86,7 +87,7 @@ func (db *db) setMigration(ctx context.Context, txn datastore.Txn, cfg client.Le if !isDstCollectionFound { // If the destination collection was not found, we must create it. This can happen when setting a migration // to a schema version that does not yet exist locally. 
- colID, err := colSeq.next(ctx, txn) + colID, err := colSeq.next(ctx) if err != nil { return err } diff --git a/db/request.go b/db/request.go index 69b300f482..099f8852ed 100644 --- a/db/request.go +++ b/db/request.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/planner" ) @@ -25,7 +24,6 @@ func (db *db) execRequest( ctx context.Context, identity immutable.Option[string], request string, - txn datastore.Txn, ) *client.RequestResult { res := &client.RequestResult{} ast, err := db.parser.BuildRequestAST(request) @@ -55,6 +53,7 @@ func (db *db) execRequest( return res } + txn := mustGetContextTxn(ctx) planner := planner.New( ctx, identity, diff --git a/db/schema.go b/db/schema.go index 5b10df9906..6d52a92aee 100644 --- a/db/schema.go +++ b/db/schema.go @@ -23,7 +23,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" ) @@ -37,7 +36,6 @@ const ( // and creates the necessary collections, request types, etc. func (db *db) addSchema( ctx context.Context, - txn datastore.Txn, schemaString string, ) ([]client.CollectionDescription, error) { newDefinitions, err := db.parser.ParseSDL(ctx, schemaString) @@ -53,14 +51,14 @@ func (db *db) addSchema( return nil, err } - col, err := db.createCollection(ctx, txn, definition) + col, err := db.createCollection(ctx, definition) if err != nil { return nil, err } returnDescriptions[i] = col.Description() } - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return nil, err } @@ -68,8 +66,10 @@ func (db *db) addSchema( return returnDescriptions, nil } -func (db *db) loadSchema(ctx context.Context, txn datastore.Txn) error { - definitions, err := db.getAllActiveDefinitions(ctx, txn) +func (db *db) loadSchema(ctx context.Context) error { + txn := mustGetContextTxn(ctx) + + definitions, err := db.getAllActiveDefinitions(ctx) if err != nil { return err } @@ -90,11 +90,12 @@ func (db *db) loadSchema(ctx context.Context, txn datastore.Txn) error { // will be applied. 
func (db *db) patchSchema( ctx context.Context, - txn datastore.Txn, patchString string, migration immutable.Option[model.Lens], setAsDefaultVersion bool, ) error { + txn := mustGetContextTxn(ctx) + patch, err := jsonpatch.DecodePatch([]byte(patchString)) if err != nil { return err @@ -137,7 +138,6 @@ func (db *db) patchSchema( for _, schema := range newSchemaByName { err := db.updateSchema( ctx, - txn, existingSchemaByName, newSchemaByName, schema, @@ -149,7 +149,7 @@ func (db *db) patchSchema( } } - return db.loadSchema(ctx, txn) + return db.loadSchema(ctx) } // substituteSchemaPatch handles any substitution of values that may be required before @@ -246,10 +246,9 @@ func substituteSchemaPatch( func (db *db) getSchemaByVersionID( ctx context.Context, - txn datastore.Txn, versionID string, ) (client.SchemaDescription, error) { - schemas, err := db.getSchemas(ctx, txn, client.SchemaFetchOptions{ID: immutable.Some(versionID)}) + schemas, err := db.getSchemas(ctx, client.SchemaFetchOptions{ID: immutable.Some(versionID)}) if err != nil { return client.SchemaDescription{}, err } @@ -260,9 +259,10 @@ func (db *db) getSchemaByVersionID( func (db *db) getSchemas( ctx context.Context, - txn datastore.Txn, options client.SchemaFetchOptions, ) ([]client.SchemaDescription, error) { + txn := mustGetContextTxn(ctx) + schemas := []client.SchemaDescription{} switch { diff --git a/db/sequence.go b/db/sequence.go index 3c510ec78c..f39bdcfb65 100644 --- a/db/sequence.go +++ b/db/sequence.go @@ -17,7 +17,6 @@ import ( ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" ) @@ -26,15 +25,15 @@ type sequence struct { val uint64 } -func (db *db) getSequence(ctx context.Context, txn datastore.Txn, key core.Key) (*sequence, error) { +func (db *db) getSequence(ctx context.Context, key core.Key) (*sequence, error) { seq := &sequence{ key: key, val: uint64(0), } - _, err := seq.get(ctx, txn) + _, err := seq.get(ctx) if errors.Is(err, ds.ErrNotFound) { - err = seq.update(ctx, txn) + err = seq.update(ctx) if err != nil { return nil, err } @@ -45,7 +44,9 @@ func (db *db) getSequence(ctx context.Context, txn datastore.Txn, key core.Key) return seq, nil } -func (seq *sequence) get(ctx context.Context, txn datastore.Txn) (uint64, error) { +func (seq *sequence) get(ctx context.Context) (uint64, error) { + txn := mustGetContextTxn(ctx) + val, err := txn.Systemstore().Get(ctx, seq.key.ToDS()) if err != nil { return 0, err @@ -55,7 +56,9 @@ func (seq *sequence) get(ctx context.Context, txn datastore.Txn) (uint64, error) return seq.val, nil } -func (seq *sequence) update(ctx context.Context, txn datastore.Txn) error { +func (seq *sequence) update(ctx context.Context) error { + txn := mustGetContextTxn(ctx) + var buf [8]byte binary.BigEndian.PutUint64(buf[:], seq.val) if err := txn.Systemstore().Put(ctx, seq.key.ToDS(), buf[:]); err != nil { @@ -65,12 +68,12 @@ func (seq *sequence) update(ctx context.Context, txn datastore.Txn) error { return nil } -func (seq *sequence) next(ctx context.Context, txn datastore.Txn) (uint64, error) { - _, err := seq.get(ctx, txn) +func (seq *sequence) next(ctx context.Context) (uint64, error) { + _, err := seq.get(ctx) if err != nil { return 0, err } seq.val++ - return seq.val, seq.update(ctx, txn) + return seq.val, seq.update(ctx) } diff --git a/db/store.go b/db/store.go index aff11f851d..5a3f3f7ad6 100644 --- a/db/store.go +++ b/db/store.go @@ -34,7 +34,7 @@ func (db *db) ExecRequest( } 
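The conversion follows one pattern throughout: public `db` methods open (or adopt) a transaction, stash it on the context with `SetContextTxn`, and private helpers recover it with `mustGetContextTxn` instead of taking a `datastore.Txn` parameter. A minimal self-contained sketch of that pattern, using a simplified `Txn` interface and a hypothetical `fakeTxn` type rather than the real `datastore.Txn`:

```go
package main

import (
	"context"
	"fmt"
)

// Txn is a simplified stand-in for datastore.Txn.
type Txn interface {
	Commit(ctx context.Context) error
	Discard(ctx context.Context)
}

// txnContextKey is the private key type under which the transaction
// is stored on the context.
type txnContextKey struct{}

// SetContextTxn returns a derived context carrying txn; all values
// already present on ctx are preserved.
func SetContextTxn(ctx context.Context, txn Txn) context.Context {
	return context.WithValue(ctx, txnContextKey{}, txn)
}

// mustGetContextTxn panics if no transaction is present. Private
// helpers may rely on it only because every public entry point
// guarantees a transaction has been placed on the context first.
func mustGetContextTxn(ctx context.Context) Txn {
	return ctx.Value(txnContextKey{}).(Txn)
}

// fakeTxn is a no-op transaction used only for this demonstration.
type fakeTxn struct{}

func (fakeTxn) Commit(ctx context.Context) error { return nil }
func (fakeTxn) Discard(ctx context.Context)      {}

// doWork stands in for a private db method: it takes no txn
// parameter and recovers the transaction from the context.
func doWork(ctx context.Context) {
	txn := mustGetContextTxn(ctx)
	fmt.Printf("working with txn %T\n", txn)
}

func main() {
	// A public method would create the txn, attach it to the context,
	// defer Discard, call the private helper, then Commit.
	ctx := SetContextTxn(context.Background(), fakeTxn{})
	doWork(ctx)
}
```

The payoff visible in the hunks that follow is mechanical: every private signature loses its `txn datastore.Txn` parameter, and only the public wrappers still manage transaction lifetimes.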
defer txn.Discard(ctx) - res := db.execRequest(ctx, identity, request, txn) + res := db.execRequest(ctx, identity, request) if len(res.GQL.Errors) > 0 { return res } @@ -55,7 +55,7 @@ func (db *db) GetCollectionByName(ctx context.Context, name string) (client.Coll } defer txn.Discard(ctx) - return db.getCollectionByName(ctx, txn, name) + return db.getCollectionByName(ctx, name) } // GetCollections gets all the currently defined collections. @@ -69,7 +69,7 @@ func (db *db) GetCollections( } defer txn.Discard(ctx) - return db.getCollections(ctx, txn, options) + return db.getCollections(ctx, options) } // GetSchemaByVersionID returns the schema description for the schema version of the @@ -83,7 +83,7 @@ func (db *db) GetSchemaByVersionID(ctx context.Context, versionID string) (clien } defer txn.Discard(ctx) - return db.getSchemaByVersionID(ctx, txn, versionID) + return db.getSchemaByVersionID(ctx, versionID) } // GetSchemas returns all schema versions that currently exist within @@ -98,7 +98,7 @@ func (db *db) GetSchemas( } defer txn.Discard(ctx) - return db.getSchemas(ctx, txn, options) + return db.getSchemas(ctx, options) } // GetAllIndexes gets all the indexes in the database. @@ -111,7 +111,7 @@ func (db *db) GetAllIndexes( } defer txn.Discard(ctx) - return db.getAllIndexDescriptions(ctx, txn) + return db.getAllIndexDescriptions(ctx) } // AddSchema takes the provided GQL schema in SDL format, and applies it to the database, @@ -126,7 +126,7 @@ func (db *db) AddSchema(ctx context.Context, schemaString string) ([]client.Coll } defer txn.Discard(ctx) - cols, err := db.addSchema(ctx, txn, schemaString) + cols, err := db.addSchema(ctx, schemaString) if err != nil { return nil, err } @@ -160,7 +160,7 @@ func (db *db) PatchSchema( } defer txn.Discard(ctx) - err = db.patchSchema(ctx, txn, patchString, migration, setAsDefaultVersion) + err = db.patchSchema(ctx, patchString, migration, setAsDefaultVersion) if err != nil { return err } @@ -178,7 +178,7 @@ func (db *db) PatchCollection( } defer txn.Discard(ctx) - err = db.patchCollection(ctx, txn, patchString) + err = db.patchCollection(ctx, patchString) if err != nil { return err } @@ -193,7 +193,7 @@ func (db *db) SetActiveSchemaVersion(ctx context.Context, schemaVersionID string } defer txn.Discard(ctx) - err = db.setActiveSchemaVersion(ctx, txn, schemaVersionID) + err = db.setActiveSchemaVersion(ctx, schemaVersionID) if err != nil { return err } @@ -208,7 +208,7 @@ func (db *db) SetMigration(ctx context.Context, cfg client.LensConfig) error { } defer txn.Discard(ctx) - err = db.setMigration(ctx, txn, cfg) + err = db.setMigration(ctx, cfg) if err != nil { return err } @@ -228,7 +228,7 @@ func (db *db) AddView( } defer txn.Discard(ctx) - defs, err := db.addView(ctx, txn, query, sdl, transform) + defs, err := db.addView(ctx, query, sdl, transform) if err != nil { return nil, err } @@ -250,7 +250,7 @@ func (db *db) BasicImport(ctx context.Context, filepath string) error { } defer txn.Discard(ctx) - err = db.basicImport(ctx, txn, filepath) + err = db.basicImport(ctx, filepath) if err != nil { return err } @@ -266,7 +266,7 @@ func (db *db) BasicExport(ctx context.Context, config *client.BackupConfig) erro } defer txn.Discard(ctx) - err = db.basicExport(ctx, txn, config) + err = db.basicExport(ctx, config) if err != nil { return err } diff --git a/db/subscriptions.go b/db/subscriptions.go index e649769c18..a8c8f5bb42 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/defradb/client" 
"github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/planner" ) @@ -63,7 +62,7 @@ func (db *db) handleSubscription( } ctx := SetContextTxn(ctx, txn) - db.handleEvent(ctx, identity, txn, pub, evt, r) + db.handleEvent(ctx, identity, pub, evt, r) txn.Discard(ctx) } } @@ -71,11 +70,11 @@ func (db *db) handleSubscription( func (db *db) handleEvent( ctx context.Context, identity immutable.Option[string], - txn datastore.Txn, pub *events.Publisher[events.Update], evt events.Update, r *request.ObjectSubscription, ) { + txn := mustGetContextTxn(ctx) p := planner.New( ctx, identity, diff --git a/db/view.go b/db/view.go index ea57f94541..5a778efd53 100644 --- a/db/view.go +++ b/db/view.go @@ -20,17 +20,17 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/description" ) func (db *db) addView( ctx context.Context, - txn datastore.Txn, inputQuery string, sdl string, transform immutable.Option[model.Lens], ) ([]client.CollectionDefinition, error) { + txn := mustGetContextTxn(ctx) + // Wrap the given query as part of the GQL query object - this simplifies the syntax for users // and ensures that we can't be given mutations. In the future this line should disappear along // with the all calls to the parser appart from `ParseSDL` when we implement the DQL stuff. @@ -80,7 +80,7 @@ func (db *db) addView( Schema: schema, } } else { - col, err := db.createCollection(ctx, txn, definition) + col, err := db.createCollection(ctx, definition) if err != nil { return nil, err } @@ -97,7 +97,7 @@ func (db *db) addView( } } - err = db.loadSchema(ctx, txn) + err = db.loadSchema(ctx) if err != nil { return nil, err } From 84297bc917d731f2fdd50765857813aa9b97452e Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Mon, 15 Apr 2024 22:02:57 -0400 Subject: [PATCH 29/49] fix: Allow update when updating non-indexed field (#2511) ## Relevant issue(s) Resolves #2510 ## Description This PR adds a check to the index validation step where if the resulting KV pair is the same on update as the previous KV pair, the outcome will be an no-op instead of an index existing error. --- db/index.go | 27 ++++++++++ .../index/update_unique_composite_test.go | 53 +++++++++++++++++++ tests/integration/index/update_unique_test.go | 51 ++++++++++++++++++ 3 files changed, 131 insertions(+) create mode 100644 tests/integration/index/update_unique_composite_test.go create mode 100644 tests/integration/index/update_unique_test.go diff --git a/db/index.go b/db/index.go index c041d3945c..693a18a5bf 100644 --- a/db/index.go +++ b/db/index.go @@ -381,6 +381,11 @@ func (index *collectionUniqueIndex) Update( oldDoc *client.Document, newDoc *client.Document, ) error { + // We only need to update the index if one of the indexed fields + // on the document has been changed. 
+ if !isUpdatingIndexedFields(index, oldDoc, newDoc) { + return nil + } newKey, newVal, err := index.prepareIndexRecordToStore(ctx, txn, newDoc) if err != nil { return err @@ -403,3 +408,25 @@ func (index *collectionUniqueIndex) deleteDocIndex( } return index.deleteIndexKey(ctx, txn, key) } + +func isUpdatingIndexedFields(index CollectionIndex, oldDoc, newDoc *client.Document) bool { + for _, indexedFields := range index.Description().Fields { + oldVal, getOldValErr := oldDoc.GetValue(indexedFields.Name) + newVal, getNewValErr := newDoc.GetValue(indexedFields.Name) + + // GetValue will return an error when the field doesn't exist. + // This will happen for oldDoc only if the field hasn't been set + // when first creating the document. For newDoc, this will happen + // only if the field hasn't been set when first creating the document + // AND the field hasn't been set on the update. + switch { + case getOldValErr != nil && getNewValErr != nil: + continue + case getOldValErr != nil && getNewValErr == nil: + return true + case oldVal.Value() != newVal.Value(): + return true + } + } + return false +} diff --git a/tests/integration/index/update_unique_composite_test.go b/tests/integration/index/update_unique_composite_test.go new file mode 100644 index 0000000000..4621e79283 --- /dev/null +++ b/tests/integration/index/update_unique_composite_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestUniqueCompositeIndexUpdate_UponUpdatingDocWithExistingFieldValue_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + Description: "updating non-indexed fields on a doc with existing field combination for composite index should succeed", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, fields: ["name", "age"]) { + name: String + age: Int + email: String + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21, + "email": "email@gmail.com" + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: ` + { + "email": "another@gmail.com" + }`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/update_unique_test.go b/tests/integration/index/update_unique_test.go new file mode 100644 index 0000000000..c2743b313a --- /dev/null +++ b/tests/integration/index/update_unique_test.go @@ -0,0 +1,51 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
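The switch in `isUpdatingIndexedFields` above encodes a small decision table: a field absent from both documents is skipped, a field set for the first time counts as an update, and differing values count as an update. A minimal sketch of that table, assuming documents are plain maps rather than the real `client.Document` (whose `GetValue` errors when a field was never set):

```go
package main

import (
	"errors"
	"fmt"
)

var errFieldNotSet = errors.New("field not set")

// getValue stands in for client.Document.GetValue, which errors when
// a field was never set on the document.
func getValue(doc map[string]any, field string) (any, error) {
	v, ok := doc[field]
	if !ok {
		return nil, errFieldNotSet
	}
	return v, nil
}

// isUpdatingIndexedFields mirrors the decision table sketched above:
// missing on both sides -> skip the field; newly set -> an update;
// values differ (including removal in this simplified version) -> an update.
func isUpdatingIndexedFields(fields []string, oldDoc, newDoc map[string]any) bool {
	for _, field := range fields {
		oldVal, oldErr := getValue(oldDoc, field)
		newVal, newErr := getValue(newDoc, field)
		switch {
		case oldErr != nil && newErr != nil:
			continue
		case oldErr != nil && newErr == nil:
			return true
		case oldVal != newVal:
			return true
		}
	}
	return false
}

func main() {
	indexed := []string{"name", "age"}
	oldDoc := map[string]any{"name": "John", "age": 21, "email": "email@gmail.com"}

	// Only the non-indexed email field changes: the unique index is
	// untouched, so Update becomes a no-op.
	unchanged := map[string]any{"name": "John", "age": 21, "email": "another@gmail.com"}
	fmt.Println(isUpdatingIndexedFields(indexed, oldDoc, unchanged)) // false

	// An indexed field changes: the index entry must be rewritten.
	changed := map[string]any{"name": "John", "age": 22, "email": "email@gmail.com"}
	fmt.Println(isUpdatingIndexedFields(indexed, oldDoc, changed)) // true
}
```

The two new integration tests in this patch exercise exactly these cases: an update touching only a non-indexed field now succeeds for both single-field and composite unique indexes.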
+
+package index
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestUniqueIndexUpdate_UponUpdatingDocNonIndexedField_ShouldSucceed(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "updating non-indexed fields on a doc with a unique index should succeed",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String @index(unique: true)
+						age: Int
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				CollectionID: 0,
+				Doc: `
+					{
+						"name": "Fred",
+						"age": 36
+					}`,
+			},
+			testUtils.UpdateDoc{
+				CollectionID: 0,
+				DocID: 0,
+				Doc: `
+					{
+						"age": 37
+					}`,
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}

From 5343bfd983323d2ed059d89ac5de0f6af2499847 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Tue, 16 Apr 2024 14:37:55 -0400
Subject: [PATCH 30/49] fix(i): Correctly handle inverse lenses from mult. sources (#2530)

## Relevant issue(s)

Resolves #2433

## Description

Correctly handles inverse lenses from multiple sources.

PR includes a new test for verifying that inversing across multiple versions does work; it is not relevant to the bug fix, but I wrote it to make sure I wasn't reading the code wrong (as I first did when creating the issue).

The bug is not testable atm, as this would only have become a problem when introducing multi-source collections (atm both `next` and `previous` slices are always empty before calling this line). Was likely copy-paste code.
---
 lens/history.go                               |   2 +-
 .../migrations/query/with_inverse_test.go     | 114 ++++++++++++++++++
 2 files changed, 115 insertions(+), 1 deletion(-)
 create mode 100644 tests/integration/schema/migrations/query/with_inverse_test.go

diff --git a/lens/history.go b/lens/history.go
index 3bf1a28ac8..eb793bff8c 100644
--- a/lens/history.go
+++ b/lens/history.go
@@ -174,7 +174,7 @@ func getCollectionHistory(
 		srcSchemaVersion := schemaVersionsByColID[source.SourceCollectionID]
 		src := history[srcSchemaVersion]
 		historyItem.previous = append(
-			historyItem.next,
+			historyItem.previous,
 			src,
 		)

diff --git a/tests/integration/schema/migrations/query/with_inverse_test.go b/tests/integration/schema/migrations/query/with_inverse_test.go
new file mode 100644
index 0000000000..2375fbb373
--- /dev/null
+++ b/tests/integration/schema/migrations/query/with_inverse_test.go
@@ -0,0 +1,114 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
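The one-line fix in `lens/history.go` is easy to miss: the `previous` links were being built by appending to `next`. A hedged sketch of the data structure involved, with hypothetical names (the real history item type in `lens/history.go` differs):

```go
package main

import "fmt"

// historyNode is a hypothetical doubly-linked schema-version node.
type historyNode struct {
	schemaVersionID string
	previous        []*historyNode // versions this one migrates from
	next            []*historyNode // versions that migrate from this one
}

// linkSource records src as a source of n. Appending to n.next here
// instead of n.previous is the shape of the copy-paste bug fixed above;
// it only surfaces once a node can have more than one source, since
// both slices start empty.
func linkSource(n, src *historyNode) {
	n.previous = append(n.previous, src)
	src.next = append(src.next, n)
}

func main() {
	v1 := &historyNode{schemaVersionID: "v1"}
	v2 := &historyNode{schemaVersionID: "v2"}
	linkSource(v2, v1)
	fmt.Println(len(v2.previous), len(v1.next)) // 1 1
}
```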
+ +package query + +import ( + "testing" + + "github.com/lens-vm/lens/host-go/config/model" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/lenses" +) + +func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema migration, inverses across multiple migrated versions", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + height: Int + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} } + ] + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreieabpdpv5ua4f6lc5lprud4vvbefmfinzqaewhx5gzuf7anwgrqmy", + DestinationSchemaVersionID: "bafkreid2g456hvlkedusgfp6argh76a74ymrlii2ag4yqqsn2sgt4pkslu", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "age", + "value": 30, + }, + }, + }, + }, + }, + }, + testUtils.ConfigureMigration{ + LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreid2g456hvlkedusgfp6argh76a74ymrlii2ag4yqqsn2sgt4pkslu", + DestinationSchemaVersionID: "bafkreibwswh2pxloduldc2l5h5jzm7b6fqt3s4vijq3nssmn3rr5gws2ki", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "height", + "value": 190, + }, + }, + }, + }, + }, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 33, + "height": 185 + }`, + }, + testUtils.SetActiveSchemaVersion{ + SchemaVersionID: "bafkreieabpdpv5ua4f6lc5lprud4vvbefmfinzqaewhx5gzuf7anwgrqmy", + }, + testUtils.Request{ + Request: `query { + Users { + name + age + height + } + }`, + Results: []map[string]any{ + { + "name": "John", + "age": nil, + "height": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 734b3263de234d6c896162c67adf10043ecbc7a9 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 18 Apr 2024 16:29:09 -0700 Subject: [PATCH 31/49] refactor(i): DB identity context (#2528) ## Relevant issue(s) Resolves #2439 ## Description This PR moves the identity param from the `client.DB` and `client.Collection` interfaces to the context. Notable changes: - `acp/identity.go` added `Identity` type alias to make future updates simpler ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
`make test` Specify the platform(s) on which this was tested: - MacOS --- acp/identity/identity.go | 24 +- cli/acp_policy_add.go | 20 - cli/client.go | 5 + cli/collection.go | 5 + cli/collection_create.go | 20 +- cli/collection_delete.go | 21 +- cli/collection_get.go | 17 +- cli/collection_list_doc_ids.go | 18 +- cli/collection_update.go | 25 +- cli/request.go | 17 +- cli/utils.go | 12 + client/collection.go | 23 +- client/db.go | 10 +- client/mocks/collection.go | 478 ++++++++---------- client/mocks/db.go | 65 +-- db/backup.go | 16 +- db/backup_test.go | 59 ++- db/collection.go | 48 +- db/collection_acp.go | 8 +- db/collection_delete.go | 33 +- db/collection_get.go | 9 +- db/collection_index.go | 6 +- db/collection_update.go | 38 +- db/context.go | 37 +- db/db.go | 5 +- db/fetcher/fetcher.go | 9 +- db/fetcher/indexer.go | 3 +- db/fetcher/mocks/fetcher.go | 24 +- db/fetcher/versioned.go | 3 +- db/indexed_docs_test.go | 46 +- db/permission/check.go | 7 +- db/permission/register.go | 5 +- db/request.go | 11 +- db/store.go | 8 +- db/subscriptions.go | 7 +- docs/cli/defradb_client.md | 5 +- docs/cli/defradb_client_acp.md | 1 + docs/cli/defradb_client_acp_policy.md | 1 + docs/cli/defradb_client_acp_policy_add.md | 6 +- docs/cli/defradb_client_backup.md | 1 + docs/cli/defradb_client_backup_export.md | 1 + docs/cli/defradb_client_backup_import.md | 1 + docs/cli/defradb_client_collection.md | 13 +- docs/cli/defradb_client_collection_create.md | 6 +- docs/cli/defradb_client_collection_delete.md | 8 +- .../cli/defradb_client_collection_describe.md | 1 + docs/cli/defradb_client_collection_docIDs.md | 4 +- docs/cli/defradb_client_collection_get.md | 6 +- docs/cli/defradb_client_collection_patch.md | 1 + docs/cli/defradb_client_collection_update.md | 10 +- docs/cli/defradb_client_dump.md | 1 + docs/cli/defradb_client_index.md | 1 + docs/cli/defradb_client_index_create.md | 1 + docs/cli/defradb_client_index_drop.md | 1 + docs/cli/defradb_client_index_list.md | 1 + docs/cli/defradb_client_p2p.md | 1 + docs/cli/defradb_client_p2p_collection.md | 1 + docs/cli/defradb_client_p2p_collection_add.md | 1 + .../defradb_client_p2p_collection_getall.md | 1 + .../defradb_client_p2p_collection_remove.md | 1 + docs/cli/defradb_client_p2p_info.md | 1 + docs/cli/defradb_client_p2p_replicator.md | 1 + .../defradb_client_p2p_replicator_delete.md | 1 + .../defradb_client_p2p_replicator_getall.md | 1 + docs/cli/defradb_client_p2p_replicator_set.md | 1 + docs/cli/defradb_client_query.md | 6 +- docs/cli/defradb_client_schema.md | 1 + docs/cli/defradb_client_schema_add.md | 1 + docs/cli/defradb_client_schema_describe.md | 1 + docs/cli/defradb_client_schema_migration.md | 1 + .../defradb_client_schema_migration_down.md | 1 + .../defradb_client_schema_migration_reload.md | 1 + ...db_client_schema_migration_set-registry.md | 1 + .../defradb_client_schema_migration_set.md | 1 + .../cli/defradb_client_schema_migration_up.md | 1 + docs/cli/defradb_client_schema_patch.md | 1 + docs/cli/defradb_client_schema_set-active.md | 1 + docs/cli/defradb_client_tx.md | 1 + docs/cli/defradb_client_tx_commit.md | 1 + docs/cli/defradb_client_tx_create.md | 1 + docs/cli/defradb_client_tx_discard.md | 1 + docs/cli/defradb_client_view.md | 1 + docs/cli/defradb_client_view_add.md | 1 + http/client.go | 2 - http/client_acp.go | 23 +- http/client_collection.go | 60 +-- http/handler.go | 1 + http/handler_acp.go | 20 +- http/handler_ccip.go | 3 +- http/handler_ccip_test.go | 3 +- http/handler_collection.go | 40 +- http/handler_store.go | 3 +- 
http/http_client.go | 6 +- http/middleware.go | 35 +- http/openapi.go | 1 - http/utils.go | 46 -- lens/fetcher.go | 3 +- net/client_test.go | 5 +- net/dag_test.go | 3 +- net/peer_collection.go | 29 +- net/peer_replicator.go | 7 +- net/peer_test.go | 39 +- net/server.go | 7 +- net/server_test.go | 8 +- planner/create.go | 1 - planner/delete.go | 1 - planner/planner.go | 5 +- planner/update.go | 2 +- tests/bench/bench_util.go | 3 +- tests/bench/collection/utils.go | 9 +- tests/bench/query/planner/utils.go | 2 +- tests/bench/query/simple/utils.go | 3 +- tests/clients/cli/wrapper.go | 7 - tests/clients/cli/wrapper_cli.go | 4 + tests/clients/cli/wrapper_collection.go | 91 +--- tests/clients/http/wrapper.go | 6 +- tests/gen/cli/gendocs.go | 3 +- tests/integration/acp.go | 14 +- .../integration/acp/add_policy/basic_test.go | 4 +- tests/integration/acp/add_policy/fixture.go | 4 +- .../acp/add_policy/with_empty_args_test.go | 6 +- .../with_extra_perms_and_relations_test.go | 2 +- .../acp/add_policy/with_extra_perms_test.go | 4 +- .../add_policy/with_extra_relations_test.go | 4 +- .../with_invalid_creator_arg_test.go | 4 +- .../add_policy/with_invalid_relations_test.go | 4 +- .../with_invalid_required_relation_test.go | 4 +- .../add_policy/with_invalid_resource_test.go | 2 +- .../add_policy/with_managed_relation_test.go | 2 +- .../add_policy/with_multi_policies_test.go | 20 +- .../with_multiple_resources_test.go | 6 +- .../acp/add_policy/with_no_perms_test.go | 8 +- .../acp/add_policy/with_no_resources_test.go | 6 +- .../acp/add_policy/with_perm_expr_test.go | 4 +- .../add_policy/with_perm_invalid_expr_test.go | 6 +- .../with_permissionless_owner_test.go | 6 +- .../add_policy/with_unused_relations_test.go | 2 +- tests/integration/acp/fixture.go | 6 +- tests/integration/acp/index/create_test.go | 4 +- tests/integration/acp/p2p/replicator_test.go | 2 +- tests/integration/acp/p2p/subscribe_test.go | 2 +- .../acp/register_and_delete_test.go | 20 +- .../integration/acp/register_and_read_test.go | 22 +- .../acp/register_and_update_test.go | 30 +- .../add_dpi/accept_basic_dpi_fmts_test.go | 4 +- .../accept_extra_permissions_on_dpi_test.go | 6 +- .../accept_managed_relation_on_dpi_test.go | 2 +- ...ept_mixed_resources_on_partial_dpi_test.go | 2 +- .../schema/add_dpi/accept_multi_dpis_test.go | 4 +- .../accept_multi_resources_on_dpi_test.go | 4 +- ...cept_same_resource_on_diff_schemas_test.go | 2 +- .../integration/acp/schema/add_dpi/fixture.go | 4 +- .../reject_empty_arg_on_schema_test.go | 4 +- .../reject_invalid_arg_type_on_schema_test.go | 4 +- ...ect_invalid_owner_read_perm_on_dpi_test.go | 10 +- ...alid_owner_read_perm_symbol_on_dpi_test.go | 6 +- ...ct_invalid_owner_write_perm_on_dpi_test.go | 10 +- ...lid_owner_write_perm_symbol_on_dpi_test.go | 6 +- .../schema/add_dpi/reject_missing_dpi_test.go | 2 +- .../reject_missing_id_arg_on_schema_test.go | 4 +- .../reject_missing_perms_on_dpi_test.go | 2 +- ...ect_missing_resource_arg_on_schema_test.go | 4 +- .../reject_missing_resource_on_dpi_test.go | 2 +- ...ect_mixed_resources_on_partial_dpi_test.go | 2 +- .../update/simple/with_doc_id_test.go | 12 +- .../update/simple/with_doc_ids_test.go | 14 +- .../update/simple/with_filter_test.go | 11 +- tests/integration/collection/utils.go | 3 +- .../updates/remove/policy_test.go | 2 +- .../events/simple/with_create_test.go | 5 +- .../events/simple/with_create_txn_test.go | 3 - .../events/simple/with_delete_test.go | 5 +- .../events/simple/with_update_test.go | 7 +- tests/integration/events/utils.go | 3 +- 
tests/integration/explain.go | 2 - tests/integration/net/order/utils.go | 27 +- tests/integration/utils2.go | 106 ++-- 177 files changed, 902 insertions(+), 1306 deletions(-) diff --git a/acp/identity/identity.go b/acp/identity/identity.go index ba6efb71fa..108c183748 100644 --- a/acp/identity/identity.go +++ b/acp/identity/identity.go @@ -14,22 +14,28 @@ Package identity provides defradb identity. package identity -import ( - "github.com/sourcenetwork/immutable" -) +import "github.com/sourcenetwork/immutable" + +// Identity is the unique identifier for an actor. +type Identity string var ( - // NoIdentity is an empty identity. - NoIdentity = immutable.None[string]() + // None is an empty identity. + None = immutable.None[Identity]() ) -// NewIdentity makes a new identity if the input is not empty otherwise, returns an empty Option. -func NewIdentity(identity string) immutable.Option[string] { +// New makes a new identity if the input is not empty otherwise, returns None. +func New(identity string) immutable.Option[Identity] { // TODO-ACP: There will be more validation once sourcehub gets some utilities. // Then a validation function would do the validation, will likely do outside this function. // https://github.com/sourcenetwork/defradb/issues/2358 if identity == "" { - return NoIdentity + return None } - return immutable.Some[string](identity) + return immutable.Some(Identity(identity)) +} + +// String returns the string representation of the identity. +func (i Identity) String() string { + return string(i) } diff --git a/cli/acp_policy_add.go b/cli/acp_policy_add.go index 01914b37c6..bca5e95abd 100644 --- a/cli/acp_policy_add.go +++ b/cli/acp_policy_add.go @@ -15,18 +15,12 @@ import ( "os" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/acp" ) func MakeACPPolicyAddCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - const fileFlagLong string = "file" const fileFlagShort string = "f" - var identityValue string var policyFile string var cmd = &cobra.Command{ @@ -77,10 +71,6 @@ Example: add from stdin: `, RunE: func(cmd *cobra.Command, args []string) error { - if identityValue == "" { - return acp.ErrPolicyCreatorMustNotBeEmpty - } - // TODO-ACP: Ensure here (before going through acp system) if the required identity argument // is valid, if it is valid then keep proceeding further, otherwise return this error: // `NewErrRequiredFlagInvalid(identityFlagLongRequired, identityFlagShortRequired)` @@ -114,7 +104,6 @@ Example: add from stdin: db := mustGetContextDB(cmd) policyResult, err := db.AddPolicy( cmd.Context(), - identityValue, policy, ) @@ -126,14 +115,5 @@ Example: add from stdin: }, } cmd.Flags().StringVarP(&policyFile, fileFlagLong, fileFlagShort, "", "File to load a policy from") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "[Required] Identity of the creator", - ) - _ = cmd.MarkFlagRequired(identityFlagLongRequired) - return cmd } diff --git a/cli/client.go b/cli/client.go index 475f83a80a..06460ca70d 100644 --- a/cli/client.go +++ b/cli/client.go @@ -16,6 +16,7 @@ import ( func MakeClientCommand() *cobra.Command { var txID uint64 + var identity string var cmd = &cobra.Command{ Use: "client", Short: "Interact with a DefraDB node", @@ -28,12 +29,16 @@ Execute queries, add schema types, obtain node info, etc.`, if err := setContextConfig(cmd); err != nil { return err } + if err := setContextIdentity(cmd, identity); err != nil { + 
return err + } if err := setContextTransaction(cmd, txID); err != nil { return err } return setContextDB(cmd) }, } + cmd.PersistentFlags().StringVarP(&identity, "identity", "i", "", "ACP Identity") cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") return cmd } diff --git a/cli/collection.go b/cli/collection.go index 5b682e5366..cdf3d41f5a 100644 --- a/cli/collection.go +++ b/cli/collection.go @@ -21,6 +21,7 @@ import ( func MakeCollectionCommand() *cobra.Command { var txID uint64 + var identity string var name string var schemaRoot string var versionID string @@ -37,6 +38,9 @@ func MakeCollectionCommand() *cobra.Command { if err := setContextConfig(cmd); err != nil { return err } + if err := setContextIdentity(cmd, identity); err != nil { + return err + } if err := setContextTransaction(cmd, txID); err != nil { return err } @@ -76,6 +80,7 @@ func MakeCollectionCommand() *cobra.Command { }, } cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") + cmd.PersistentFlags().StringVarP(&identity, "identity", "i", "", "ACP Identity") cmd.PersistentFlags().StringVar(&name, "name", "", "Collection name") cmd.PersistentFlags().StringVar(&schemaRoot, "schema", "", "Collection schema Root") cmd.PersistentFlags().StringVar(&versionID, "version", "", "Collection version ID") diff --git a/cli/collection_create.go b/cli/collection_create.go index 0af57d77ed..c61a286326 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -16,17 +16,11 @@ import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionCreateCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string var file string - var cmd = &cobra.Command{ Use: "create [-i --identity] ", Short: "Create a new document.", @@ -49,9 +43,6 @@ Example: create from stdin: `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. 
- identity := acpIdentity.NewIdentity(identityValue) - var docData []byte switch { case file != "": @@ -82,23 +73,16 @@ Example: create from stdin: if err != nil { return err } - return col.CreateMany(cmd.Context(), identity, docs) + return col.CreateMany(cmd.Context(), docs) } doc, err := client.NewDocFromJSON(docData, col.Schema()) if err != nil { return err } - return col.Create(cmd.Context(), identity, doc) + return col.Create(cmd.Context(), doc) }, } cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 1d1c128948..5bbe32a964 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -13,15 +13,10 @@ package cli import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionDeleteCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string var argDocIDs []string var filter string var cmd = &cobra.Command{ @@ -39,9 +34,6 @@ Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' `, RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. - identity := acpIdentity.NewIdentity(identityValue) - col, ok := tryGetContextCollection(cmd) if !ok { return cmd.Usage() @@ -53,7 +45,7 @@ Example: delete by filter: if err != nil { return err } - res, err := col.DeleteWithDocID(cmd.Context(), identity, docID) + res, err := col.DeleteWithDocID(cmd.Context(), docID) if err != nil { return err } @@ -67,13 +59,13 @@ Example: delete by filter: } docIDs[i] = docID } - res, err := col.DeleteWithDocIDs(cmd.Context(), identity, docIDs) + res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs) if err != nil { return err } return writeJSON(cmd, res) case filter != "": - res, err := col.DeleteWithFilter(cmd.Context(), identity, filter) + res, err := col.DeleteWithFilter(cmd.Context(), filter) if err != nil { return err } @@ -85,12 +77,5 @@ Example: delete by filter: } cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/collection_get.go b/cli/collection_get.go index 1a924ea1aa..9ad5566f62 100644 --- a/cli/collection_get.go +++ b/cli/collection_get.go @@ -13,15 +13,10 @@ package cli import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionGetCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string var showDeleted bool var cmd = &cobra.Command{ Use: "get [-i --identity] [--show-deleted] ", @@ -36,9 +31,6 @@ Example to get a private document we must use an identity: `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. 
- identity := acpIdentity.NewIdentity(identityValue) - col, ok := tryGetContextCollection(cmd) if !ok { return cmd.Usage() @@ -48,7 +40,7 @@ Example to get a private document we must use an identity: if err != nil { return err } - doc, err := col.Get(cmd.Context(), identity, docID, showDeleted) + doc, err := col.Get(cmd.Context(), docID, showDeleted) if err != nil { return err } @@ -60,12 +52,5 @@ Example to get a private document we must use an identity: }, } cmd.Flags().BoolVar(&showDeleted, "show-deleted", false, "Show deleted documents") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/collection_list_doc_ids.go b/cli/collection_list_doc_ids.go index 10f6d879bf..168bb74a5a 100644 --- a/cli/collection_list_doc_ids.go +++ b/cli/collection_list_doc_ids.go @@ -13,16 +13,10 @@ package cli import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/http" ) func MakeCollectionListDocIDsCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string - var cmd = &cobra.Command{ Use: "docIDs [-i --identity]", Short: "List all document IDs (docIDs).", @@ -35,15 +29,12 @@ Example: list all docID(s), with an identity: defradb client collection docIDs -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User `, RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. - identity := acpIdentity.NewIdentity(identityValue) - col, ok := tryGetContextCollection(cmd) if !ok { return cmd.Usage() } - docCh, err := col.GetAllDocIDs(cmd.Context(), identity) + docCh, err := col.GetAllDocIDs(cmd.Context()) if err != nil { return err } @@ -61,12 +52,5 @@ Example: list all docID(s), with an identity: return nil }, } - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/collection_update.go b/cli/collection_update.go index 816cad8029..2777c0ed98 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -13,15 +13,10 @@ package cli import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) func MakeCollectionUpdateCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string var argDocIDs []string var filter string var updater string @@ -47,9 +42,6 @@ Example: update private docIDs, with identity: `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. 
- identity := acpIdentity.NewIdentity(identityValue) - col, ok := tryGetContextCollection(cmd) if !ok { return cmd.Usage() @@ -61,7 +53,7 @@ Example: update private docIDs, with identity: if err != nil { return err } - res, err := col.UpdateWithDocID(cmd.Context(), identity, docID, updater) + res, err := col.UpdateWithDocID(cmd.Context(), docID, updater) if err != nil { return err } @@ -75,13 +67,13 @@ Example: update private docIDs, with identity: } docIDs[i] = docID } - res, err := col.UpdateWithDocIDs(cmd.Context(), identity, docIDs, updater) + res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater) if err != nil { return err } return writeJSON(cmd, res) case filter != "" && updater != "": - res, err := col.UpdateWithFilter(cmd.Context(), identity, filter, updater) + res, err := col.UpdateWithFilter(cmd.Context(), filter, updater) if err != nil { return err } @@ -91,14 +83,14 @@ Example: update private docIDs, with identity: if err != nil { return err } - doc, err := col.Get(cmd.Context(), identity, docID, true) + doc, err := col.Get(cmd.Context(), docID, true) if err != nil { return err } if err := doc.SetWithJSON([]byte(args[0])); err != nil { return err } - return col.Update(cmd.Context(), identity, doc) + return col.Update(cmd.Context(), doc) default: return ErrNoDocIDOrFilter } @@ -107,12 +99,5 @@ Example: update private docIDs, with identity: cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/request.go b/cli/request.go index c583d51a28..3dba0c197d 100644 --- a/cli/request.go +++ b/cli/request.go @@ -16,7 +16,6 @@ import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/errors" ) @@ -26,10 +25,6 @@ const ( ) func MakeRequestCommand() *cobra.Command { - const identityFlagLongRequired string = "identity" - const identityFlagShortRequired string = "i" - - var identityValue string var filePath string var cmd = &cobra.Command{ Use: "query [-i --identity] [request]", @@ -53,9 +48,6 @@ with the database more conveniently. To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. 
- identity := acpIdentity.NewIdentity(identityValue) - var request string switch { case filePath != "": @@ -79,7 +71,7 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so } store := mustGetContextStore(cmd) - result := store.ExecRequest(cmd.Context(), identity, request) + result := store.ExecRequest(cmd.Context(), request) var errors []string for _, err := range result.GQL.Errors { @@ -98,12 +90,5 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so } cmd.Flags().StringVarP(&filePath, "file", "f", "", "File containing the query request") - cmd.Flags().StringVarP( - &identityValue, - identityFlagLongRequired, - identityFlagShortRequired, - "", - "Identity of the actor", - ) return cmd } diff --git a/cli/utils.go b/cli/utils.go index 1df10a3409..25af57528b 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -20,6 +20,7 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/http" @@ -123,6 +124,17 @@ func setContextTransaction(cmd *cobra.Command, txId uint64) error { return nil } +// setContextIdentity sets the identity for the current command context. +func setContextIdentity(cmd *cobra.Command, identity string) error { + // TODO-ACP: `https://github.com/sourcenetwork/defradb/issues/2358` do the validation here. + if identity == "" { + return nil + } + ctx := db.SetContextIdentity(cmd.Context(), acpIdentity.New(identity)) + cmd.SetContext(ctx) + return nil +} + // setContextRootDir sets the rootdir for the current command context. func setContextRootDir(cmd *cobra.Command) error { rootdir, err := cmd.Root().PersistentFlags().GetString("rootdir") diff --git a/client/collection.go b/client/collection.go index bab61607a9..05cb821889 100644 --- a/client/collection.go +++ b/client/collection.go @@ -44,12 +44,12 @@ type Collection interface { // Create a new document. // // Will verify the DocID/CID to ensure that the new document is correctly formatted. - Create(ctx context.Context, identity immutable.Option[string], doc *Document) error + Create(ctx context.Context, doc *Document) error // CreateMany new documents. // // Will verify the DocIDs/CIDs to ensure that the new documents are correctly formatted. - CreateMany(ctx context.Context, identity immutable.Option[string], docs []*Document) error + CreateMany(ctx context.Context, docs []*Document) error // Update an existing document with the new values. // @@ -57,13 +57,13 @@ type Collection interface { // Any field that is nil/empty that hasn't called Clear will be ignored. // // Will return a ErrDocumentNotFound error if the given document is not found. - Update(ctx context.Context, identity immutable.Option[string], docs *Document) error + Update(ctx context.Context, docs *Document) error // Save the given document in the database. // // If a document exists with the given DocID it will update it. Otherwise a new document // will be created. - Save(ctx context.Context, identity immutable.Option[string], doc *Document) error + Save(ctx context.Context, doc *Document) error // Delete will attempt to delete a document by DocID. // @@ -71,12 +71,12 @@ type Collection interface { // if it cannot. If the document doesn't exist, then it will return false and a ErrDocumentNotFound error. // This operation will hard-delete all state relating to the given DocID. // This includes data, block, and head storage. 
- Delete(ctx context.Context, identity immutable.Option[string], docID DocID) (bool, error) + Delete(ctx context.Context, docID DocID) (bool, error) // Exists checks if a given document exists with supplied DocID. // // Will return true if a matching document exists, otherwise will return false. - Exists(ctx context.Context, identity immutable.Option[string], docID DocID) (bool, error) + Exists(ctx context.Context, docID DocID) (bool, error) // UpdateWith updates a target document using the given updater type. // @@ -89,7 +89,6 @@ type Collection interface { // Returns an ErrInvalidUpdater error if the updater type is not supported. UpdateWith( ctx context.Context, - identity immutable.Option[string], target any, updater string, ) (*UpdateResult, error) @@ -100,7 +99,6 @@ type Collection interface { // else an ErrInvalidUpdater will be returned. UpdateWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, updater string, ) (*UpdateResult, error) @@ -113,7 +111,6 @@ type Collection interface { // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. UpdateWithDocID( ctx context.Context, - identity immutable.Option[string], docID DocID, updater string, ) (*UpdateResult, error) @@ -126,7 +123,6 @@ type Collection interface { // Returns an ErrDocumentNotFound if a document is not found for any given DocID. UpdateWithDocIDs( ctx context.Context, - identity immutable.Option[string], docIDs []DocID, updater string, ) (*UpdateResult, error) @@ -142,7 +138,6 @@ type Collection interface { // Returns an ErrInvalidDeleteTarget if the target type is not supported. DeleteWith( ctx context.Context, - identity immutable.Option[string], target any, ) (*DeleteResult, error) @@ -152,7 +147,6 @@ type Collection interface { // with a status of `Deleted`. DeleteWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, ) (*DeleteResult, error) @@ -164,7 +158,6 @@ type Collection interface { // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. DeleteWithDocID( ctx context.Context, - identity immutable.Option[string], docID DocID, ) (*DeleteResult, error) @@ -176,7 +169,6 @@ type Collection interface { // Returns an ErrDocumentNotFound if a document is not found for any given DocID. DeleteWithDocIDs( ctx context.Context, - identity immutable.Option[string], docIDs []DocID, ) (*DeleteResult, error) @@ -185,13 +177,12 @@ type Collection interface { // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. Get( ctx context.Context, - identity immutable.Option[string], docID DocID, showDeleted bool, ) (*Document, error) // GetAllDocIDs returns all the document IDs that exist in the collection. - GetAllDocIDs(ctx context.Context, identity immutable.Option[string]) (<-chan DocIDResult, error) + GetAllDocIDs(ctx context.Context) (<-chan DocIDResult, error) // CreateIndex creates a new index on the collection. // `IndexDescription` contains the description of the index to be created. diff --git a/client/db.go b/client/db.go index cedd63d492..c5cb95eb4b 100644 --- a/client/db.go +++ b/client/db.go @@ -93,7 +93,7 @@ type DB interface { // validation fails. // // Note: A policy can not be added without the creatorID (identity). - AddPolicy(ctx context.Context, creatorID string, policy string) (AddPolicyResult, error) + AddPolicy(ctx context.Context, policy string) (AddPolicyResult, error) } // Store contains the core DefraDB read-write operations. 
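Mirroring the earlier transaction refactor, the identity now travels on the context instead of appearing in every `Collection` and `Store` signature. A minimal sketch of that mechanism, assuming a hypothetical `identityContextKey` (the db package's real key type is private and may differ):

```go
package main

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/immutable"
)

// Identity mirrors the new type in the acp/identity package.
type Identity string

// identityContextKey is a hypothetical private key type, shaped like
// the transaction key used elsewhere in the db package.
type identityContextKey struct{}

// SetContextIdentity returns a derived context carrying the optional
// identity.
func SetContextIdentity(ctx context.Context, identity immutable.Option[Identity]) context.Context {
	return context.WithValue(ctx, identityContextKey{}, identity)
}

// GetContextIdentity recovers the identity, defaulting to None so
// callers that never set one behave as anonymous actors.
func GetContextIdentity(ctx context.Context) immutable.Option[Identity] {
	if identity, ok := ctx.Value(identityContextKey{}).(immutable.Option[Identity]); ok {
		return identity
	}
	return immutable.None[Identity]()
}

func main() {
	ctx := context.Background()
	fmt.Println(GetContextIdentity(ctx).HasValue()) // false

	ctx = SetContextIdentity(ctx, immutable.Some(Identity("cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j")))
	fmt.Println(GetContextIdentity(ctx).Value()) // the acting identity
}
```

This is why `col.Create(cmd.Context(), doc)` in the CLI changes above no longer takes an identity argument: `setContextIdentity` attaches it once in the command's `PersistentPreRunE`, and everything downstream reads it from the context.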
@@ -235,12 +235,8 @@ type Store interface { // GetAllIndexes returns all the indexes that currently exist within this [Store]. GetAllIndexes(context.Context) (map[CollectionName][]IndexDescription, error) - // ExecRequest executes the given GQL request against the [Store], with the given identity. - ExecRequest( - ctx context.Context, - identity immutable.Option[string], - request string, - ) *RequestResult + // ExecRequest executes the given GQL request against the [Store]. + ExecRequest(ctx context.Context, request string) *RequestResult } // GQLResult represents the immediate results of a GQL request. diff --git a/client/mocks/collection.go b/client/mocks/collection.go index 397bac7d1b..fac90fe1e4 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -7,8 +7,6 @@ import ( client "github.com/sourcenetwork/defradb/client" - datastore "github.com/sourcenetwork/defradb/datastore" - immutable "github.com/sourcenetwork/immutable" mock "github.com/stretchr/testify/mock" @@ -27,13 +25,13 @@ func (_m *Collection) EXPECT() *Collection_Expecter { return &Collection_Expecter{mock: &_m.Mock} } -// Create provides a mock function with given fields: ctx, identity, doc -func (_m *Collection) Create(ctx context.Context, identity immutable.Option[string], doc *client.Document) error { - ret := _m.Called(ctx, identity, doc) +// Create provides a mock function with given fields: ctx, doc +func (_m *Collection) Create(ctx context.Context, doc *client.Document) error { + ret := _m.Called(ctx, doc) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { - r0 = rf(ctx, identity, doc) + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + r0 = rf(ctx, doc) } else { r0 = ret.Error(0) } @@ -48,15 +46,14 @@ type Collection_Create_Call struct { // Create is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - doc *client.Document -func (_e *Collection_Expecter) Create(ctx interface{}, identity interface{}, doc interface{}) *Collection_Create_Call { - return &Collection_Create_Call{Call: _e.mock.On("Create", ctx, identity, doc)} +func (_e *Collection_Expecter) Create(ctx interface{}, doc interface{}) *Collection_Create_Call { + return &Collection_Create_Call{Call: _e.mock.On("Create", ctx, doc)} } -func (_c *Collection_Create_Call) Run(run func(ctx context.Context, identity immutable.Option[string], doc *client.Document)) *Collection_Create_Call { +func (_c *Collection_Create_Call) Run(run func(ctx context.Context, doc *client.Document)) *Collection_Create_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) + run(args[0].(context.Context), args[1].(*client.Document)) }) return _c } @@ -66,7 +63,7 @@ func (_c *Collection_Create_Call) Return(_a0 error) *Collection_Create_Call { return _c } -func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Create_Call { +func (_c *Collection_Create_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Create_Call { _c.Call.Return(run) return _c } @@ -167,13 +164,13 @@ func (_c *Collection_CreateIndex_Call) RunAndReturn(run func(context.Context, cl return _c } -// CreateMany provides a mock function with given fields: ctx, identity, docs -func (_m *Collection) CreateMany(ctx context.Context, identity 
immutable.Option[string], docs []*client.Document) error { - ret := _m.Called(ctx, identity, docs) +// CreateMany provides a mock function with given fields: ctx, docs +func (_m *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { + ret := _m.Called(ctx, docs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []*client.Document) error); ok { - r0 = rf(ctx, identity, docs) + if rf, ok := ret.Get(0).(func(context.Context, []*client.Document) error); ok { + r0 = rf(ctx, docs) } else { r0 = ret.Error(0) } @@ -188,15 +185,14 @@ type Collection_CreateMany_Call struct { // CreateMany is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docs []*client.Document -func (_e *Collection_Expecter) CreateMany(ctx interface{}, identity interface{}, docs interface{}) *Collection_CreateMany_Call { - return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", ctx, identity, docs)} +func (_e *Collection_Expecter) CreateMany(ctx interface{}, docs interface{}) *Collection_CreateMany_Call { + return &Collection_CreateMany_Call{Call: _e.mock.On("CreateMany", ctx, docs)} } -func (_c *Collection_CreateMany_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docs []*client.Document)) *Collection_CreateMany_Call { +func (_c *Collection_CreateMany_Call) Run(run func(ctx context.Context, docs []*client.Document)) *Collection_CreateMany_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]*client.Document)) + run(args[0].(context.Context), args[1].([]*client.Document)) }) return _c } @@ -206,7 +202,7 @@ func (_c *Collection_CreateMany_Call) Return(_a0 error) *Collection_CreateMany_C return _c } -func (_c *Collection_CreateMany_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []*client.Document) error) *Collection_CreateMany_Call { +func (_c *Collection_CreateMany_Call) RunAndReturn(run func(context.Context, []*client.Document) error) *Collection_CreateMany_Call { _c.Call.Return(run) return _c } @@ -252,23 +248,23 @@ func (_c *Collection_Definition_Call) RunAndReturn(run func() client.CollectionD return _c } -// Delete provides a mock function with given fields: ctx, identity, docID -func (_m *Collection) Delete(ctx context.Context, identity immutable.Option[string], docID client.DocID) (bool, error) { - ret := _m.Called(ctx, identity, docID) +// Delete provides a mock function with given fields: ctx, docID +func (_m *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + ret := _m.Called(ctx, docID) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (bool, error)); ok { - return rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { + return rf(ctx, docID) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) bool); ok { - r0 = rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { + r0 = rf(ctx, docID) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { - r1 = rf(ctx, identity, docID) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { + r1 = rf(ctx, docID) } else { r1 = ret.Error(1) } @@ -283,15 +279,14 @@ type Collection_Delete_Call struct { // Delete 
is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docID client.DocID -func (_e *Collection_Expecter) Delete(ctx interface{}, identity interface{}, docID interface{}) *Collection_Delete_Call { - return &Collection_Delete_Call{Call: _e.mock.On("Delete", ctx, identity, docID)} +func (_e *Collection_Expecter) Delete(ctx interface{}, docID interface{}) *Collection_Delete_Call { + return &Collection_Delete_Call{Call: _e.mock.On("Delete", ctx, docID)} } -func (_c *Collection_Delete_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_Delete_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -301,7 +296,7 @@ func (_c *Collection_Delete_Call) Return(_a0 bool, _a1 error) *Collection_Delete return _c } -func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (bool, error)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Delete_Call { _c.Call.Return(run) return _c } @@ -349,25 +344,25 @@ func (_c *Collection_DeleteDocIndex_Call) RunAndReturn(run func(context.Context, return _c } -// DeleteWith provides a mock function with given fields: ctx, identity, target -func (_m *Collection) DeleteWith(ctx context.Context, identity immutable.Option[string], target interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, identity, target) +// DeleteWith provides a mock function with given fields: ctx, target +func (_m *Collection) DeleteWith(ctx context.Context, target interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, target) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, identity, target) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, target) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, identity, target) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, target) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}) error); ok { - r1 = rf(ctx, identity, target) + if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { + r1 = rf(ctx, target) } else { r1 = ret.Error(1) } @@ -382,15 +377,14 @@ type Collection_DeleteWith_Call struct { // DeleteWith is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - target interface{} -func (_e *Collection_Expecter) DeleteWith(ctx interface{}, identity interface{}, target interface{}) *Collection_DeleteWith_Call { - return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, identity, target)} +func (_e *Collection_Expecter) DeleteWith(ctx interface{}, target interface{}) *Collection_DeleteWith_Call { + return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, target)} } 
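// A sketch of driving the regenerated mocks above from a test, assuming a
// mockery-style mocks.NewCollection(t) constructor (analogous to the NewDB
// constructor later in this patch); the expecter methods now take one
// argument fewer, matching the trimmed Collection interface.
mockCol := mocks.NewCollection(t)
mockCol.EXPECT().Delete(mock.Anything, mock.Anything).
	RunAndReturn(func(ctx context.Context, docID client.DocID) (bool, error) {
		return true, nil // report the document as found and deleted
	})
deleted, err := mockCol.Delete(context.Background(), docID) // docID is a placeholder value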
-func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, identity immutable.Option[string], target interface{})) *Collection_DeleteWith_Call { +func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, target interface{})) *Collection_DeleteWith_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{})) + run(args[0].(context.Context), args[1].(interface{})) }) return _c } @@ -400,30 +394,30 @@ func (_c *Collection_DeleteWith_Call) Return(_a0 *client.DeleteResult, _a1 error return _c } -func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { +func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { _c.Call.Return(run) return _c } -// DeleteWithDocID provides a mock function with given fields: ctx, identity, docID -func (_m *Collection) DeleteWithDocID(ctx context.Context, identity immutable.Option[string], docID client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(ctx, identity, docID) +// DeleteWithDocID provides a mock function with given fields: ctx, docID +func (_m *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(ctx, docID) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (*client.DeleteResult, error)); ok { - return rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { + return rf(ctx, docID) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) *client.DeleteResult); ok { - r0 = rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { + r0 = rf(ctx, docID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { - r1 = rf(ctx, identity, docID) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { + r1 = rf(ctx, docID) } else { r1 = ret.Error(1) } @@ -438,15 +432,14 @@ type Collection_DeleteWithDocID_Call struct { // DeleteWithDocID is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docID client.DocID -func (_e *Collection_Expecter) DeleteWithDocID(ctx interface{}, identity interface{}, docID interface{}) *Collection_DeleteWithDocID_Call { - return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", ctx, identity, docID)} +func (_e *Collection_Expecter) DeleteWithDocID(ctx interface{}, docID interface{}) *Collection_DeleteWithDocID_Call { + return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", ctx, docID)} } -func (_c *Collection_DeleteWithDocID_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_DeleteWithDocID_Call { +func (_c *Collection_DeleteWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_DeleteWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return 
_c } @@ -456,30 +449,30 @@ func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { +func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { _c.Call.Return(run) return _c } -// DeleteWithDocIDs provides a mock function with given fields: ctx, identity, docIDs -func (_m *Collection) DeleteWithDocIDs(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(ctx, identity, docIDs) +// DeleteWithDocIDs provides a mock function with given fields: ctx, docIDs +func (_m *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(ctx, docIDs) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID) (*client.DeleteResult, error)); ok { - return rf(ctx, identity, docIDs) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { + return rf(ctx, docIDs) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID) *client.DeleteResult); ok { - r0 = rf(ctx, identity, docIDs) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { + r0 = rf(ctx, docIDs) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], []client.DocID) error); ok { - r1 = rf(ctx, identity, docIDs) + if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { + r1 = rf(ctx, docIDs) } else { r1 = ret.Error(1) } @@ -494,15 +487,14 @@ type Collection_DeleteWithDocIDs_Call struct { // DeleteWithDocIDs is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docIDs []client.DocID -func (_e *Collection_Expecter) DeleteWithDocIDs(ctx interface{}, identity interface{}, docIDs interface{}) *Collection_DeleteWithDocIDs_Call { - return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", ctx, identity, docIDs)} +func (_e *Collection_Expecter) DeleteWithDocIDs(ctx interface{}, docIDs interface{}) *Collection_DeleteWithDocIDs_Call { + return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", ctx, docIDs)} } -func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(ctx context.Context, docIDs []client.DocID)) *Collection_DeleteWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]client.DocID)) + run(args[0].(context.Context), args[1].([]client.DocID)) }) return _c } @@ -512,30 +504,30 @@ func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { +func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, 
[]client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(run) return _c } -// DeleteWithFilter provides a mock function with given fields: ctx, identity, filter -func (_m *Collection) DeleteWithFilter(ctx context.Context, identity immutable.Option[string], filter interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, identity, filter) +// DeleteWithFilter provides a mock function with given fields: ctx, filter +func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, filter) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, identity, filter) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, identity, filter) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}) error); ok { - r1 = rf(ctx, identity, filter) + if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } @@ -550,15 +542,14 @@ type Collection_DeleteWithFilter_Call struct { // DeleteWithFilter is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - filter interface{} -func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, identity interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { - return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, identity, filter)} +func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { + return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} } -func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, identity immutable.Option[string], filter interface{})) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{})) + run(args[0].(context.Context), args[1].(interface{})) }) return _c } @@ -568,7 +559,7 @@ func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 return _c } -func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { _c.Call.Return(run) return _c } @@ -657,23 +648,23 @@ func (_c *Collection_DropIndex_Call) RunAndReturn(run func(context.Context, stri return _c } -// Exists provides a mock function with given fields: ctx, identity, docID -func (_m *Collection) Exists(ctx context.Context, identity immutable.Option[string], docID client.DocID) (bool, error) { - ret 
:= _m.Called(ctx, identity, docID) +// Exists provides a mock function with given fields: ctx, docID +func (_m *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + ret := _m.Called(ctx, docID) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) (bool, error)); ok { - return rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { + return rf(ctx, docID) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID) bool); ok { - r0 = rf(ctx, identity, docID) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { + r0 = rf(ctx, docID) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID) error); ok { - r1 = rf(ctx, identity, docID) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { + r1 = rf(ctx, docID) } else { r1 = ret.Error(1) } @@ -688,15 +679,14 @@ type Collection_Exists_Call struct { // Exists is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docID client.DocID -func (_e *Collection_Expecter) Exists(ctx interface{}, identity interface{}, docID interface{}) *Collection_Exists_Call { - return &Collection_Exists_Call{Call: _e.mock.On("Exists", ctx, identity, docID)} +func (_e *Collection_Expecter) Exists(ctx interface{}, docID interface{}) *Collection_Exists_Call { + return &Collection_Exists_Call{Call: _e.mock.On("Exists", ctx, docID)} } -func (_c *Collection_Exists_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_Exists_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -706,30 +696,30 @@ func (_c *Collection_Exists_Call) Return(_a0 bool, _a1 error) *Collection_Exists return _c } -func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID) (bool, error)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Exists_Call { _c.Call.Return(run) return _c } -// Get provides a mock function with given fields: ctx, identity, docID, showDeleted -func (_m *Collection) Get(ctx context.Context, identity immutable.Option[string], docID client.DocID, showDeleted bool) (*client.Document, error) { - ret := _m.Called(ctx, identity, docID, showDeleted) +// Get provides a mock function with given fields: ctx, docID, showDeleted +func (_m *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { + ret := _m.Called(ctx, docID, showDeleted) var r0 *client.Document var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, bool) (*client.Document, error)); ok { - return rf(ctx, identity, docID, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) (*client.Document, error)); ok { + return rf(ctx, docID, showDeleted) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, bool) *client.Document); ok { - r0 = rf(ctx, identity, docID, showDeleted) + if 
rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) *client.Document); ok { + r0 = rf(ctx, docID, showDeleted) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.Document) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID, bool) error); ok { - r1 = rf(ctx, identity, docID, showDeleted) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, bool) error); ok { + r1 = rf(ctx, docID, showDeleted) } else { r1 = ret.Error(1) } @@ -744,16 +734,15 @@ type Collection_Get_Call struct { // Get is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docID client.DocID // - showDeleted bool -func (_e *Collection_Expecter) Get(ctx interface{}, identity interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { - return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, identity, docID, showDeleted)} +func (_e *Collection_Expecter) Get(ctx interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { + return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, docID, showDeleted)} } -func (_c *Collection_Get_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID, showDeleted bool)) *Collection_Get_Call { +func (_c *Collection_Get_Call) Run(run func(ctx context.Context, docID client.DocID, showDeleted bool)) *Collection_Get_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID), args[3].(bool)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(bool)) }) return _c } @@ -763,30 +752,30 @@ func (_c *Collection_Get_Call) Return(_a0 *client.Document, _a1 error) *Collecti return _c } -func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { +func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { _c.Call.Return(run) return _c } -// GetAllDocIDs provides a mock function with given fields: ctx, identity -func (_m *Collection) GetAllDocIDs(ctx context.Context, identity immutable.Option[string]) (<-chan client.DocIDResult, error) { - ret := _m.Called(ctx, identity) +// GetAllDocIDs provides a mock function with given fields: ctx +func (_m *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { + ret := _m.Called(ctx) var r0 <-chan client.DocIDResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string]) (<-chan client.DocIDResult, error)); ok { - return rf(ctx, identity) + if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocIDResult, error)); ok { + return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string]) <-chan client.DocIDResult); ok { - r0 = rf(ctx, identity) + if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocIDResult); ok { + r0 = rf(ctx) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(<-chan client.DocIDResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string]) error); ok { - r1 = rf(ctx, identity) + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) } else { r1 = ret.Error(1) } @@ -801,14 +790,13 @@ type Collection_GetAllDocIDs_Call struct { // GetAllDocIDs is a helper method to define mock.On call // - ctx context.Context -// - identity 
immutable.Option[string] -func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}, identity interface{}) *Collection_GetAllDocIDs_Call { - return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx, identity)} +func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}) *Collection_GetAllDocIDs_Call { + return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx)} } -func (_c *Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string])) *Collection_GetAllDocIDs_Call { +func (_c *Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string])) + run(args[0].(context.Context)) }) return _c } @@ -818,7 +806,7 @@ func (_c *Collection_GetAllDocIDs_Call) Return(_a0 <-chan client.DocIDResult, _a return _c } -func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string]) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { +func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { _c.Call.Return(run) return _c } @@ -959,13 +947,13 @@ func (_c *Collection_Name_Call) RunAndReturn(run func() immutable.Option[string] return _c } -// Save provides a mock function with given fields: ctx, identity, doc -func (_m *Collection) Save(ctx context.Context, identity immutable.Option[string], doc *client.Document) error { - ret := _m.Called(ctx, identity, doc) +// Save provides a mock function with given fields: ctx, doc +func (_m *Collection) Save(ctx context.Context, doc *client.Document) error { + ret := _m.Called(ctx, doc) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { - r0 = rf(ctx, identity, doc) + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + r0 = rf(ctx, doc) } else { r0 = ret.Error(0) } @@ -980,15 +968,14 @@ type Collection_Save_Call struct { // Save is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - doc *client.Document -func (_e *Collection_Expecter) Save(ctx interface{}, identity interface{}, doc interface{}) *Collection_Save_Call { - return &Collection_Save_Call{Call: _e.mock.On("Save", ctx, identity, doc)} +func (_e *Collection_Expecter) Save(ctx interface{}, doc interface{}) *Collection_Save_Call { + return &Collection_Save_Call{Call: _e.mock.On("Save", ctx, doc)} } -func (_c *Collection_Save_Call) Run(run func(ctx context.Context, identity immutable.Option[string], doc *client.Document)) *Collection_Save_Call { +func (_c *Collection_Save_Call) Run(run func(ctx context.Context, doc *client.Document)) *Collection_Save_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) + run(args[0].(context.Context), args[1].(*client.Document)) }) return _c } @@ -998,7 +985,7 @@ func (_c *Collection_Save_Call) Return(_a0 error) *Collection_Save_Call { return _c } -func (_c *Collection_Save_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Save_Call { +func (_c *Collection_Save_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Save_Call { _c.Call.Return(run) return _c } @@ -1085,13 +1072,13 @@ func (_c 
*Collection_SchemaRoot_Call) RunAndReturn(run func() string) *Collectio return _c } -// Update provides a mock function with given fields: ctx, identity, docs -func (_m *Collection) Update(ctx context.Context, identity immutable.Option[string], docs *client.Document) error { - ret := _m.Called(ctx, identity, docs) +// Update provides a mock function with given fields: ctx, docs +func (_m *Collection) Update(ctx context.Context, docs *client.Document) error { + ret := _m.Called(ctx, docs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], *client.Document) error); ok { - r0 = rf(ctx, identity, docs) + if rf, ok := ret.Get(0).(func(context.Context, *client.Document) error); ok { + r0 = rf(ctx, docs) } else { r0 = ret.Error(0) } @@ -1106,15 +1093,14 @@ type Collection_Update_Call struct { // Update is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docs *client.Document -func (_e *Collection_Expecter) Update(ctx interface{}, identity interface{}, docs interface{}) *Collection_Update_Call { - return &Collection_Update_Call{Call: _e.mock.On("Update", ctx, identity, docs)} +func (_e *Collection_Expecter) Update(ctx interface{}, docs interface{}) *Collection_Update_Call { + return &Collection_Update_Call{Call: _e.mock.On("Update", ctx, docs)} } -func (_c *Collection_Update_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docs *client.Document)) *Collection_Update_Call { +func (_c *Collection_Update_Call) Run(run func(ctx context.Context, docs *client.Document)) *Collection_Update_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(*client.Document)) + run(args[0].(context.Context), args[1].(*client.Document)) }) return _c } @@ -1124,7 +1110,7 @@ func (_c *Collection_Update_Call) Return(_a0 error) *Collection_Update_Call { return _c } -func (_c *Collection_Update_Call) RunAndReturn(run func(context.Context, immutable.Option[string], *client.Document) error) *Collection_Update_Call { +func (_c *Collection_Update_Call) RunAndReturn(run func(context.Context, *client.Document) error) *Collection_Update_Call { _c.Call.Return(run) return _c } @@ -1173,25 +1159,25 @@ func (_c *Collection_UpdateDocIndex_Call) RunAndReturn(run func(context.Context, return _c } -// UpdateWith provides a mock function with given fields: ctx, identity, target, updater -func (_m *Collection) UpdateWith(ctx context.Context, identity immutable.Option[string], target interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, identity, target, updater) +// UpdateWith provides a mock function with given fields: ctx, target, updater +func (_m *Collection) UpdateWith(ctx context.Context, target interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, target, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, identity, target, updater) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, target, updater) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, identity, target, updater) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { + r0 = 
rf(ctx, target, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}, string) error); ok { - r1 = rf(ctx, identity, target, updater) + if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { + r1 = rf(ctx, target, updater) } else { r1 = ret.Error(1) } @@ -1206,16 +1192,15 @@ type Collection_UpdateWith_Call struct { // UpdateWith is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - target interface{} // - updater string -func (_e *Collection_Expecter) UpdateWith(ctx interface{}, identity interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { - return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, identity, target, updater)} +func (_e *Collection_Expecter) UpdateWith(ctx interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { + return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, target, updater)} } -func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, identity immutable.Option[string], target interface{}, updater string)) *Collection_UpdateWith_Call { +func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, target interface{}, updater string)) *Collection_UpdateWith_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{}), args[3].(string)) + run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) }) return _c } @@ -1225,30 +1210,30 @@ func (_c *Collection_UpdateWith_Call) Return(_a0 *client.UpdateResult, _a1 error return _c } -func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { +func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { _c.Call.Return(run) return _c } -// UpdateWithDocID provides a mock function with given fields: ctx, identity, docID, updater -func (_m *Collection) UpdateWithDocID(ctx context.Context, identity immutable.Option[string], docID client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, identity, docID, updater) +// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater +func (_m *Collection) UpdateWithDocID(ctx context.Context, docID client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, docID, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, identity, docID, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, docID, updater) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, identity, docID, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, docID, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], client.DocID, string) error); ok { - 
r1 = rf(ctx, identity, docID, updater) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { + r1 = rf(ctx, docID, updater) } else { r1 = ret.Error(1) } @@ -1263,16 +1248,15 @@ type Collection_UpdateWithDocID_Call struct { // UpdateWithDocID is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docID client.DocID // - updater string -func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, identity interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { - return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, identity, docID, updater)} +func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { + return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} } -func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { +func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(client.DocID), args[3].(string)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) }) return _c } @@ -1282,30 +1266,30 @@ func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, immutable.Option[string], client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { +func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { _c.Call.Return(run) return _c } -// UpdateWithDocIDs provides a mock function with given fields: ctx, identity, docIDs, updater -func (_m *Collection) UpdateWithDocIDs(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, identity, docIDs, updater) +// UpdateWithDocIDs provides a mock function with given fields: ctx, docIDs, updater +func (_m *Collection) UpdateWithDocIDs(ctx context.Context, docIDs []client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, docIDs, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, identity, docIDs, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, docIDs, updater) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], []client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, identity, docIDs, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, docIDs, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], []client.DocID, string) error); ok { - r1 = rf(ctx, identity, docIDs, updater) + if rf, ok := ret.Get(1).(func(context.Context, 
[]client.DocID, string) error); ok { + r1 = rf(ctx, docIDs, updater) } else { r1 = ret.Error(1) } @@ -1320,16 +1304,15 @@ type Collection_UpdateWithDocIDs_Call struct { // UpdateWithDocIDs is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - docIDs []client.DocID // - updater string -func (_e *Collection_Expecter) UpdateWithDocIDs(ctx interface{}, identity interface{}, docIDs interface{}, updater interface{}) *Collection_UpdateWithDocIDs_Call { - return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", ctx, identity, docIDs, updater)} +func (_e *Collection_Expecter) UpdateWithDocIDs(ctx interface{}, docIDs interface{}, updater interface{}) *Collection_UpdateWithDocIDs_Call { + return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", ctx, docIDs, updater)} } -func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(ctx context.Context, identity immutable.Option[string], docIDs []client.DocID, updater string)) *Collection_UpdateWithDocIDs_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(ctx context.Context, docIDs []client.DocID, updater string)) *Collection_UpdateWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].([]client.DocID), args[3].(string)) + run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) }) return _c } @@ -1339,30 +1322,30 @@ func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, immutable.Option[string], []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { +func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(run) return _c } -// UpdateWithFilter provides a mock function with given fields: ctx, identity, filter, updater -func (_m *Collection) UpdateWithFilter(ctx context.Context, identity immutable.Option[string], filter interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, identity, filter, updater) +// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater +func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, filter, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, identity, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, filter, updater) } - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, identity, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { + r0 = rf(ctx, filter, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, immutable.Option[string], interface{}, string) error); ok { - r1 = rf(ctx, identity, filter, updater) + if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { + r1 = rf(ctx, filter, updater) } 
else { r1 = ret.Error(1) } @@ -1377,16 +1360,15 @@ type Collection_UpdateWithFilter_Call struct { // UpdateWithFilter is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - filter interface{} // - updater string -func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, identity interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { - return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, identity, filter, updater)} +func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { + return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} } -func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, identity immutable.Option[string], filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(interface{}), args[3].(string)) + run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) }) return _c } @@ -1396,51 +1378,7 @@ func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 return _c } -func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, immutable.Option[string], interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { - _c.Call.Return(run) - return _c -} - -// WithTxn provides a mock function with given fields: _a0 -func (_m *Collection) WithTxn(_a0 datastore.Txn) client.Collection { - ret := _m.Called(_a0) - - var r0 client.Collection - if rf, ok := ret.Get(0).(func(datastore.Txn) client.Collection); ok { - r0 = rf(_a0) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(client.Collection) - } - } - - return r0 -} - -// Collection_WithTxn_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithTxn' -type Collection_WithTxn_Call struct { - *mock.Call -} - -// WithTxn is a helper method to define mock.On call -// - _a0 datastore.Txn -func (_e *Collection_Expecter) WithTxn(_a0 interface{}) *Collection_WithTxn_Call { - return &Collection_WithTxn_Call{Call: _e.mock.On("WithTxn", _a0)} -} - -func (_c *Collection_WithTxn_Call) Run(run func(_a0 datastore.Txn)) *Collection_WithTxn_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(datastore.Txn)) - }) - return _c -} - -func (_c *Collection_WithTxn_Call) Return(_a0 client.Collection) *Collection_WithTxn_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *Collection_WithTxn_Call) RunAndReturn(run func(datastore.Txn) client.Collection) *Collection_WithTxn_Call { +func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { _c.Call.Return(run) return _c } diff --git a/client/mocks/db.go b/client/mocks/db.go index c31578e190..31c44c1241 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -400,13 +400,13 @@ func (_c *DB_Events_Call) RunAndReturn(run func() events.Events) *DB_Events_Call return _c } -// ExecRequest provides a mock function with given fields: ctx, identity, request -func (_m *DB) ExecRequest(ctx context.Context, identity 
immutable.Option[string], request string) *client.RequestResult { - ret := _m.Called(ctx, identity, request) +// ExecRequest provides a mock function with given fields: ctx, request +func (_m *DB) ExecRequest(ctx context.Context, request string) *client.RequestResult { + ret := _m.Called(ctx, request) var r0 *client.RequestResult - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], string) *client.RequestResult); ok { - r0 = rf(ctx, identity, request) + if rf, ok := ret.Get(0).(func(context.Context, string) *client.RequestResult); ok { + r0 = rf(ctx, request) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.RequestResult) @@ -423,15 +423,14 @@ type DB_ExecRequest_Call struct { // ExecRequest is a helper method to define mock.On call // - ctx context.Context -// - identity immutable.Option[string] // - request string -func (_e *DB_Expecter) ExecRequest(ctx interface{}, identity interface{}, request interface{}) *DB_ExecRequest_Call { - return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", ctx, identity, request)} +func (_e *DB_Expecter) ExecRequest(ctx interface{}, request interface{}) *DB_ExecRequest_Call { + return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", ctx, request)} } -func (_c *DB_ExecRequest_Call) Run(run func(ctx context.Context, identity immutable.Option[string], request string)) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) Run(run func(ctx context.Context, request string)) *DB_ExecRequest_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(string)) + run(args[0].(context.Context), args[1].(string)) }) return _c } @@ -441,7 +440,7 @@ func (_c *DB_ExecRequest_Call) Return(_a0 *client.RequestResult) *DB_ExecRequest return _c } -func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, immutable.Option[string], string) *client.RequestResult) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, string) *client.RequestResult) *DB_ExecRequest_Call { _c.Call.Return(run) return _c } @@ -1214,50 +1213,6 @@ func (_c *DB_SetMigration_Call) RunAndReturn(run func(context.Context, client.Le return _c } -// WithTxn provides a mock function with given fields: _a0 -func (_m *DB) WithTxn(_a0 datastore.Txn) client.Store { - ret := _m.Called(_a0) - - var r0 client.Store - if rf, ok := ret.Get(0).(func(datastore.Txn) client.Store); ok { - r0 = rf(_a0) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(client.Store) - } - } - - return r0 -} - -// DB_WithTxn_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithTxn' -type DB_WithTxn_Call struct { - *mock.Call -} - -// WithTxn is a helper method to define mock.On call -// - _a0 datastore.Txn -func (_e *DB_Expecter) WithTxn(_a0 interface{}) *DB_WithTxn_Call { - return &DB_WithTxn_Call{Call: _e.mock.On("WithTxn", _a0)} -} - -func (_c *DB_WithTxn_Call) Run(run func(_a0 datastore.Txn)) *DB_WithTxn_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(datastore.Txn)) - }) - return _c -} - -func (_c *DB_WithTxn_Call) Return(_a0 client.Store) *DB_WithTxn_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *DB_WithTxn_Call) RunAndReturn(run func(datastore.Txn) client.Store) *DB_WithTxn_Call { - _c.Call.Return(run) - return _c -} - // NewDB creates a new instance of DB. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
// The first argument is typically a *testing.T value. func NewDB(t interface { diff --git a/db/backup.go b/db/backup.go index 4c72797b0e..cc2f732d7e 100644 --- a/db/backup.go +++ b/db/backup.go @@ -17,7 +17,6 @@ import ( "fmt" "os" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" ) @@ -90,8 +89,7 @@ func (db *db) basicImport(ctx context.Context, filepath string) (err error) { return NewErrDocFromMap(err) } - // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) if err != nil { return NewErrDocCreate(err) } @@ -102,8 +100,7 @@ func (db *db) basicImport(ctx context.Context, filepath string) (err error) { if err != nil { return NewErrDocUpdate(err) } - // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to backup - err = col.Update(ctx, acpIdentity.NoIdentity, doc) + err = col.Update(ctx, doc) if err != nil { return NewErrDocUpdate(err) } @@ -190,8 +187,7 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err if err != nil { return err } - // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export - docIDsCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) + docIDsCh, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -207,8 +203,7 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err return err } } - // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - Add identity ability to export - doc, err := col.Get(ctx, acpIdentity.NoIdentity, docResultWithID.ID, false) + doc, err := col.Get(ctx, docResultWithID.ID, false) if err != nil { return err } @@ -240,8 +235,7 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err if err != nil { return err } - // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2430 - foreignDoc, err := foreignCol.Get(ctx, acpIdentity.NoIdentity, foreignDocID, false) + foreignDoc, err := foreignCol.Get(ctx, foreignDocID, false) if err != nil { err := doc.Set(field.Name+request.RelatedObjectID, nil) if err != nil { diff --git a/db/backup_test.go b/db/backup_test.go index 968415f3b3..ec9bc947b5 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -47,10 +47,10 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Create(ctx, doc1) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) + err = col1.Create(ctx, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -59,14 +59,15 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) + err = col2.Create(ctx, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) - defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) @@ -111,10 +112,10 @@ func 
TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Create(ctx, doc1) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) + err = col1.Create(ctx, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -123,14 +124,15 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) + err = col2.Create(ctx, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) - defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Pretty: true}) @@ -175,10 +177,10 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Create(ctx, doc1) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) + err = col1.Create(ctx, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -187,14 +189,15 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) + err = col2.Create(ctx, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) - defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath, Collections: []string{"Address"}}) @@ -240,10 +243,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Create(ctx, doc1) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) + err = col1.Create(ctx, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Book") @@ -255,22 +258,23 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) + err = col2.Create(ctx, doc3) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc4) + err = col2.Create(ctx, doc4) require.NoError(t, err) err = doc1.Set("age", 31) require.NoError(t, err) - err = col1.Update(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Update(ctx, doc1) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) - defer txn.Discard(ctx) filepath := t.TempDir() + 
"/test.json" err = db.basicExport(ctx, &client.BackupConfig{Filepath: filepath}) @@ -315,10 +319,10 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc1) + err = col1.Create(ctx, doc1) require.NoError(t, err) - err = col1.Create(ctx, acpIdentity.NoIdentity, doc2) + err = col1.Create(ctx, doc2) require.NoError(t, err) col2, err := db.GetCollectionByName(ctx, "Address") @@ -327,14 +331,15 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) - err = col2.Create(ctx, acpIdentity.NoIdentity, doc3) + err = col2.Create(ctx, doc3) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) require.NoError(t, err) + defer txn.Discard(ctx) + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) - defer txn.Discard(ctx) filepath := t.TempDir() + "/test.json" @@ -380,6 +385,8 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { txn, err := db.NewTxn(ctx, false) require.NoError(t, err) + + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) filepath := t.TempDir() + "/test.json" @@ -398,6 +405,8 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { txn, err = db.NewTxn(ctx, true) require.NoError(t, err) + + ctx = SetContextIdentity(ctx, acpIdentity.None) ctx = SetContextTxn(ctx, txn) col1, err := db.getCollectionByName(ctx, "Address") @@ -405,7 +414,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") require.NoError(t, err) - _, err = col1.Get(ctx, acpIdentity.NoIdentity, key1, false) + _, err = col1.Get(ctx, key1, false) require.NoError(t, err) col2, err := db.getCollectionByName(ctx, "User") @@ -413,12 +422,12 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") require.NoError(t, err) - _, err = col2.Get(ctx, acpIdentity.NoIdentity, key2, false) + _, err = col2.Get(ctx, key2, false) require.NoError(t, err) key3, err := client.NewDocIDFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") require.NoError(t, err) - _, err = col2.Get(ctx, acpIdentity.NoIdentity, key3, false) + _, err = col2.Get(ctx, key3, false) require.NoError(t, err) } diff --git a/db/collection.go b/db/collection.go index faae1bbda7..7b2305364a 100644 --- a/db/collection.go +++ b/db/collection.go @@ -1227,18 +1227,16 @@ func (db *db) getAllActiveDefinitions(ctx context.Context) ([]client.CollectionD // it hits every key and will cause Tx conflicts for concurrent Txs func (c *collection) GetAllDocIDs( ctx context.Context, - identity immutable.Option[string], ) (<-chan client.DocIDResult, error) { ctx, _, err := ensureContextTxn(ctx, c.db, true) if err != nil { return nil, err } - return c.getAllDocIDsChan(ctx, identity) + return c.getAllDocIDsChan(ctx) } func (c *collection) getAllDocIDsChan( ctx context.Context, - identity immutable.Option[string], ) (<-chan client.DocIDResult, error) { txn := mustGetContextTxn(ctx) prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix @@ -1288,7 +1286,6 @@ func (c *collection) getAllDocIDsChan( canRead, err := c.checkAccessOfDocWithACP( 
ctx, - identity, acp.ReadPermission, docID.String(), ) @@ -1343,7 +1340,6 @@ func (c *collection) Definition() client.CollectionDefinition { // Will verify the DocID/CID to ensure that the new document is correctly formatted. func (c *collection) Create( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1352,7 +1348,7 @@ func (c *collection) Create( } defer txn.Discard(ctx) - err = c.create(ctx, identity, doc) + err = c.create(ctx, doc) if err != nil { return err } @@ -1364,7 +1360,6 @@ func (c *collection) Create( // Will verify the DocID/CID to ensure that the new documents are correctly formatted. func (c *collection) CreateMany( ctx context.Context, - identity immutable.Option[string], docs []*client.Document, ) error { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1374,7 +1369,7 @@ func (c *collection) CreateMany( defer txn.Discard(ctx) for _, doc := range docs { - err = c.create(ctx, identity, doc) + err = c.create(ctx, doc) if err != nil { return err } @@ -1400,7 +1395,6 @@ func (c *collection) getDocIDAndPrimaryKeyFromDoc( func (c *collection) create( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) @@ -1409,7 +1403,7 @@ func (c *collection) create( } // check if doc already exists - exists, isDeleted, err := c.exists(ctx, identity, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -1431,7 +1425,7 @@ func (c *collection) create( } // write data to DB via MerkleClock/CRDT - _, err = c.save(ctx, identity, doc, true) + _, err = c.save(ctx, doc, true) if err != nil { return err } @@ -1441,7 +1435,7 @@ func (c *collection) create( return err } - return c.registerDocWithACP(ctx, identity, doc.ID().String()) + return c.registerDocWithACP(ctx, doc.ID().String()) } // Update an existing document with the new values. @@ -1449,7 +1443,6 @@ func (c *collection) create( // Any field that is nil/empty that hasn't called Clear will be ignored. func (c *collection) Update( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1459,7 +1452,7 @@ func (c *collection) Update( defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, identity, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -1470,7 +1463,7 @@ func (c *collection) Update( return NewErrDocumentDeleted(primaryKey.DocID) } - err = c.update(ctx, identity, doc) + err = c.update(ctx, doc) if err != nil { return err } @@ -1485,13 +1478,11 @@ func (c *collection) Update( // add to the bloat. func (c *collection) update( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { // Stop the update if the correct permissions aren't there. 
canUpdate, err := c.checkAccessOfDocWithACP( ctx, - identity, acp.WritePermission, doc.ID().String(), ) @@ -1502,7 +1493,7 @@ func (c *collection) update( return client.ErrDocumentNotFoundOrNotAuthorized } - _, err = c.save(ctx, identity, doc, false) + _, err = c.save(ctx, doc, false) if err != nil { return err } @@ -1513,7 +1504,6 @@ func (c *collection) update( // Either by creating a new document or by updating an existing one func (c *collection) Save( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1524,7 +1514,7 @@ func (c *collection) Save( // Check if document already exists with primary DS key. primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) - exists, isDeleted, err := c.exists(ctx, identity, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -1534,9 +1524,9 @@ func (c *collection) Save( } if exists { - err = c.update(ctx, identity, doc) + err = c.update(ctx, doc) } else { - err = c.create(ctx, identity, doc) + err = c.create(ctx, doc) } if err != nil { return err @@ -1550,7 +1540,6 @@ // save elsewhere could cause the omission of acp checks. func (c *collection) save( ctx context.Context, - identity immutable.Option[string], doc *client.Document, isCreate bool, ) (cid.Cid, error) { @@ -1606,7 +1595,6 @@ err = c.patchPrimaryDoc( ctx, - identity, c.Name().Value(), relationFieldDescription, primaryKey.DocID, @@ -1623,7 +1611,6 @@ err = c.validateOneToOneLinkDoesntAlreadyExist( ctx, - identity, doc.ID().String(), fieldDescription, val.Value(), @@ -1692,7 +1679,6 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( ctx context.Context, - identity immutable.Option[string], docID string, fieldDescription client.FieldDefinition, value any, @@ -1738,7 +1724,7 @@ fieldDescription.Name, value, ) - selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return err } @@ -1792,7 +1778,6 @@ // This operation will remove all state relating to the given DocID. This includes data, block, and head storage. func (c *collection) Delete( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (bool, error) { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1803,7 +1788,7 @@ primaryKey := c.getPrimaryKeyFromDocID(docID) - err = c.applyDelete(ctx, identity, primaryKey) + err = c.applyDelete(ctx, primaryKey) if err != nil { return false, err } @@ -1813,7 +1798,6 @@ // Exists checks if a given document exists with supplied DocID.
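Taken together, the hunks above move both the transaction and the ACP identity off the method parameter lists and onto the context. A minimal sketch of the resulting calling convention from inside the `db` package (the `db`, `col`, `doc`, and `ctx` values are assumed to exist; `SetContextIdentity` and `SetContextTxn` are the helpers this patch adds in `db/context.go`):

```go
// Hedged sketch, not production code: identity and transaction now travel
// on the context instead of being threaded through every method signature.
txn, err := db.NewTxn(ctx, false)
if err != nil {
	return err
}
defer txn.Discard(ctx)

ctx = SetContextIdentity(ctx, acpIdentity.None) // or immutable.Some(ident)
ctx = SetContextTxn(ctx, txn)

// The ACP checks inside Create/Update/Save read the identity from ctx.
if err := col.Create(ctx, doc); err != nil {
	return err
}
return txn.Commit(ctx)
```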
func (c *collection) Exists( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (bool, error) { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -1823,7 +1807,7 @@ func (c *collection) Exists( defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - exists, isDeleted, err := c.exists(ctx, identity, primaryKey) + exists, isDeleted, err := c.exists(ctx, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err } @@ -1833,12 +1817,10 @@ func (c *collection) Exists( // check if a document exists with the given primary key func (c *collection) exists( ctx context.Context, - identity immutable.Option[string], primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { canRead, err := c.checkAccessOfDocWithACP( ctx, - identity, acp.ReadPermission, primaryKey.DocID, ) diff --git a/db/collection_acp.go b/db/collection_acp.go index ccb4f3ae32..4a273e907e 100644 --- a/db/collection_acp.go +++ b/db/collection_acp.go @@ -13,8 +13,6 @@ package db import ( "context" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/db/permission" ) @@ -32,14 +30,13 @@ import ( // Otherwise, nothing is registered with the acp system. func (c *collection) registerDocWithACP( ctx context.Context, - identity immutable.Option[string], docID string, ) error { // If acp is not available, then no document is registered. if !c.db.acp.HasValue() { return nil } - + identity := GetContextIdentity(ctx) return permission.RegisterDocOnCollectionWithACP( ctx, identity, @@ -51,7 +48,6 @@ func (c *collection) registerDocWithACP( func (c *collection) checkAccessOfDocWithACP( ctx context.Context, - identity immutable.Option[string], dpiPermission acp.DPIPermission, docID string, ) (bool, error) { @@ -59,7 +55,7 @@ func (c *collection) checkAccessOfDocWithACP( if !c.db.acp.HasValue() { return true, nil } - + identity := GetContextIdentity(ctx) return permission.CheckAccessOfDocOnCollectionWithACP( ctx, identity, diff --git a/db/collection_delete.go b/db/collection_delete.go index a6d12399ce..e8bf13b221 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -13,8 +13,6 @@ package db import ( "context" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -32,16 +30,15 @@ import ( // Eg: DeleteWithFilter or DeleteWithDocID func (c *collection) DeleteWith( ctx context.Context, - identity immutable.Option[string], target any, ) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, identity, t) + return c.DeleteWithFilter(ctx, t) case client.DocID: - return c.DeleteWithDocID(ctx, identity, t) + return c.DeleteWithDocID(ctx, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, identity, t) + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -50,7 +47,6 @@ func (c *collection) DeleteWith( // DeleteWithDocID deletes using a DocID to target a single document for delete. 
func (c *collection) DeleteWithDocID( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (*client.DeleteResult, error) { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -60,7 +56,7 @@ func (c *collection) DeleteWithDocID( defer txn.Discard(ctx) dsKey := c.getPrimaryKeyFromDocID(docID) - res, err := c.deleteWithKey(ctx, identity, dsKey) + res, err := c.deleteWithKey(ctx, dsKey) if err != nil { return nil, err } @@ -71,7 +67,6 @@ func (c *collection) DeleteWithDocID( // DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. func (c *collection) DeleteWithDocIDs( ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, ) (*client.DeleteResult, error) { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -80,7 +75,7 @@ func (c *collection) DeleteWithDocIDs( } defer txn.Discard(ctx) - res, err := c.deleteWithIDs(ctx, identity, docIDs, client.Deleted) + res, err := c.deleteWithIDs(ctx, docIDs, client.Deleted) if err != nil { return nil, err } @@ -91,7 +86,6 @@ func (c *collection) DeleteWithDocIDs( // DeleteWithFilter deletes using a filter to target documents for delete. func (c *collection) DeleteWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, ) (*client.DeleteResult, error) { ctx, txn, err := ensureContextTxn(ctx, c.db, false) @@ -100,7 +94,7 @@ func (c *collection) DeleteWithFilter( } defer txn.Discard(ctx) - res, err := c.deleteWithFilter(ctx, identity, filter, client.Deleted) + res, err := c.deleteWithFilter(ctx, filter, client.Deleted) if err != nil { return nil, err } @@ -110,12 +104,11 @@ func (c *collection) DeleteWithFilter( func (c *collection) deleteWithKey( ctx context.Context, - identity immutable.Option[string], key core.PrimaryDataStoreKey, ) (*client.DeleteResult, error) { // Check the key we have been given to delete with actually has a corresponding // document (i.e. document actually exists in the collection). - err := c.applyDelete(ctx, identity, key) + err := c.applyDelete(ctx, key) if err != nil { return nil, err } @@ -131,7 +124,6 @@ func (c *collection) deleteWithKey( func (c *collection) deleteWithIDs( ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, _ client.DocumentStatus, ) (*client.DeleteResult, error) { @@ -143,7 +135,7 @@ func (c *collection) deleteWithIDs( primaryKey := c.getPrimaryKeyFromDocID(docID) // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, identity, primaryKey) + err := c.applyDelete(ctx, primaryKey) if err != nil { return nil, err } @@ -160,12 +152,11 @@ func (c *collection) deleteWithIDs( func (c *collection) deleteWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, _ client.DocumentStatus, ) (*client.DeleteResult, error) { // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return nil, err } @@ -213,7 +204,7 @@ func (c *collection) deleteWithFilter( } // Delete the document that is associated with this DS key we got from the filter. 
- err = c.applyDelete(ctx, identity, primaryKey) + err = c.applyDelete(ctx, primaryKey) if err != nil { return nil, err } @@ -229,11 +220,10 @@ func (c *collection) applyDelete( ctx context.Context, - identity immutable.Option[string], primaryKey core.PrimaryDataStoreKey, ) error { // Must also have read permission to delete, in order to check if the document exists. - found, isDeleted, err := c.exists(ctx, identity, primaryKey) + found, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return err } @@ -247,7 +237,6 @@ // Stop deletion of document if the correct permissions aren't there. canDelete, err := c.checkAccessOfDocWithACP( ctx, - identity, acp.WritePermission, primaryKey.DocID, ) diff --git a/db/collection_get.go b/db/collection_get.go index 968e6ca761..7f7d82f0ed 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -13,8 +13,6 @@ package db import ( "context" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" @@ -23,7 +21,6 @@ import ( func (c *collection) Get( ctx context.Context, - identity immutable.Option[string], docID client.DocID, showDeleted bool, ) (*client.Document, error) { @@ -35,7 +32,7 @@ defer txn.Discard(ctx) primaryKey := c.getPrimaryKeyFromDocID(docID) - found, isDeleted, err := c.exists(ctx, identity, primaryKey) + found, isDeleted, err := c.exists(ctx, primaryKey) if err != nil { return nil, err } @@ -43,7 +40,7 @@ return nil, client.ErrDocumentNotFoundOrNotAuthorized } - doc, err := c.get(ctx, identity, primaryKey, nil, showDeleted) + doc, err := c.get(ctx, primaryKey, nil, showDeleted) if err != nil { return nil, err } @@ -57,12 +54,12 @@ func (c *collection) get( ctx context.Context, - identity immutable.Option[string], primaryKey core.PrimaryDataStoreKey, fields []client.FieldDefinition, showDeleted bool, ) (*client.Document, error) { txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) // create a new document fetcher df := c.newFetcher() // initialize it with the primary index diff --git a/db/collection_index.go b/db/collection_index.go index 0c1921dd62..159e40b4e9 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/immutable" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -186,7 +185,6 @@ func (c *collection) updateIndexedDoc( // and handle the case of when oldDoc == nil (will be nil if inaccessible document).
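The `DeleteWith` type switch shown earlier means one entry point serves DocID, DocID-slice, and filter targets alike; a hedged usage sketch (the map filter shape is an assumption, chosen to match the types `makeSelectionPlan` accepts):

```go
// Each call below resolves to the same context-identity-aware delete path.
res, err := col.DeleteWith(ctx, docID)                // client.DocID
res, err = col.DeleteWith(ctx, []client.DocID{docID}) // []client.DocID
res, err = col.DeleteWith(ctx, map[string]any{        // filter as a map
	"name": map[string]any{"_eq": "Bob"},
})
_, _ = res, err
```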
oldDoc, err := c.get( ctx, - acpIdentity.NoIdentity, c.getPrimaryKeyFromDocID(doc.ID()), c.Definition().CollectIndexedFields(), false, @@ -325,10 +323,12 @@ func (c *collection) iterateAllDocs( exec func(doc *client.Document) error, ) error { txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) + df := c.newFetcher() err := df.Init( ctx, - acpIdentity.NoIdentity, // TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 - ACP <> Indexing + identity, txn, c.db.acp, c, diff --git a/db/collection_update.go b/db/collection_update.go index 9a8e2bc552..c4d56de158 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -31,17 +31,16 @@ import ( // Eg: UpdateWithFilter or UpdateWithDocID func (c *collection) UpdateWith( ctx context.Context, - identity immutable.Option[string], target any, updater string, ) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, identity, t, updater) + return c.UpdateWithFilter(ctx, t, updater) case client.DocID: - return c.UpdateWithDocID(ctx, identity, t, updater) + return c.UpdateWithDocID(ctx, t, updater) case []client.DocID: - return c.UpdateWithDocIDs(ctx, identity, t, updater) + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -52,7 +51,6 @@ func (c *collection) UpdateWith( // or a parsed Patch, or parsed Merge Patch. func (c *collection) UpdateWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, updater string, ) (*client.UpdateResult, error) { @@ -62,7 +60,7 @@ func (c *collection) UpdateWithFilter( } defer txn.Discard(ctx) - res, err := c.updateWithFilter(ctx, identity, filter, updater) + res, err := c.updateWithFilter(ctx, filter, updater) if err != nil { return nil, err } @@ -74,7 +72,6 @@ func (c *collection) UpdateWithFilter( // or a parsed Patch, or parsed Merge Patch. func (c *collection) UpdateWithDocID( ctx context.Context, - identity immutable.Option[string], docID client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -84,7 +81,7 @@ func (c *collection) UpdateWithDocID( } defer txn.Discard(ctx) - res, err := c.updateWithDocID(ctx, identity, docID, updater) + res, err := c.updateWithDocID(ctx, docID, updater) if err != nil { return nil, err } @@ -97,7 +94,6 @@ func (c *collection) UpdateWithDocID( // or a parsed Patch, or parsed Merge Patch. 
func (c *collection) UpdateWithDocIDs( ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -107,7 +103,7 @@ func (c *collection) UpdateWithDocIDs( } defer txn.Discard(ctx) - res, err := c.updateWithIDs(ctx, identity, docIDs, updater) + res, err := c.updateWithIDs(ctx, docIDs, updater) if err != nil { return nil, err } @@ -117,7 +113,6 @@ func (c *collection) UpdateWithDocIDs( func (c *collection) updateWithDocID( ctx context.Context, - identity immutable.Option[string], docID client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -133,7 +128,7 @@ func (c *collection) updateWithDocID( return nil, client.ErrInvalidUpdater } - doc, err := c.Get(ctx, identity, docID, false) + doc, err := c.Get(ctx, docID, false) if err != nil { return nil, err } @@ -147,7 +142,7 @@ func (c *collection) updateWithDocID( return nil, err } - err = c.update(ctx, identity, doc) + err = c.update(ctx, doc) if err != nil { return nil, err } @@ -161,7 +156,6 @@ func (c *collection) updateWithDocID( func (c *collection) updateWithIDs( ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -181,7 +175,7 @@ func (c *collection) updateWithIDs( DocIDs: make([]string, len(docIDs)), } for i, docIDs := range docIDs { - doc, err := c.Get(ctx, identity, docIDs, false) + doc, err := c.Get(ctx, docIDs, false) if err != nil { return nil, err } @@ -195,7 +189,7 @@ func (c *collection) updateWithIDs( return nil, err } - err = c.update(ctx, identity, doc) + err = c.update(ctx, doc) if err != nil { return nil, err } @@ -208,7 +202,6 @@ func (c *collection) updateWithIDs( func (c *collection) updateWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, updater string, ) (*client.UpdateResult, error) { @@ -229,7 +222,7 @@ func (c *collection) updateWithFilter( } // Make a selection plan that will scan through only the documents with matching filter. - selectionPlan, err := c.makeSelectionPlan(ctx, identity, filter) + selectionPlan, err := c.makeSelectionPlan(ctx, filter) if err != nil { return nil, err } @@ -283,7 +276,7 @@ func (c *collection) updateWithFilter( } } - err = c.update(ctx, identity, doc) + err = c.update(ctx, doc) if err != nil { return nil, err } @@ -316,7 +309,6 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDefinition) (clien // patched. 
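As the doc comments above note, the `updater` argument is a JSON patch or merge patch carried as a string; a hedged sketch of a filter-targeted update (field names and filter shape are assumptions for illustration):

```go
// Sets age to 23 on every document matching the filter.
res, err := col.UpdateWithFilter(ctx,
	map[string]any{"name": map[string]any{"_eq": "Bob"}},
	`{"age": 23}`, // merge patch form; a JSON array would be treated as a patch
)
```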
func (c *collection) patchPrimaryDoc( ctx context.Context, - identity immutable.Option[string], secondaryCollectionName string, relationFieldDescription client.FieldDefinition, docID string, @@ -350,7 +342,6 @@ doc, err := primaryCol.Get( ctx, - identity, primaryDocID, false, ) @@ -367,7 +358,6 @@ pc := c.db.newCollection(primaryCol.Description(), primarySchema) err = pc.validateOneToOneLinkDoesntAlreadyExist( ctx, - identity, primaryDocID.String(), primaryIDField, docID, @@ -390,7 +380,7 @@ return err } - err = primaryCol.Update(ctx, identity, doc) + err = primaryCol.Update(ctx, doc) if err != nil { return err } @@ -404,7 +394,6 @@ // Additionally it only requests for the root scalar fields of the object func (c *collection) makeSelectionPlan( ctx context.Context, - identity immutable.Option[string], filter any, ) (planner.RequestPlan, error) { var f immutable.Option[request.Filter] @@ -431,6 +420,7 @@ } txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) planner := planner.New( ctx, identity, diff --git a/db/context.go b/db/context.go index f235475d24..88019af323 100644 --- a/db/context.go +++ b/db/context.go @@ -13,12 +13,18 @@ package db import ( "context" + "github.com/sourcenetwork/immutable" + + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/datastore" ) // txnContextKey is the key type for transaction context values. type txnContextKey struct{} +// identityContextKey is the key type for ACP identity context values. +type identityContextKey struct{} + // explicitTxn is a transaction that is managed outside of a db operation. type explicitTxn struct { datastore.Txn @@ -37,18 +43,21 @@ type transactionDB interface { NewTxn(context.Context, bool) (datastore.Txn, error) } -// ensureContextTxn ensures that the returned context has a transaction. +// ensureContextTxn ensures that the returned context has a transaction +// and an identity. // // If a transaction exists on the context it will be made explicit, // otherwise a new implicit transaction will be created. // -// The returned context will contain the transaction +// The returned context will contain the transaction and identity // along with the copied values from the input context. func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (context.Context, datastore.Txn, error) { + // explicit transaction txn, ok := TryGetContextTxn(ctx) if ok { return SetContextTxn(ctx, &explicitTxn{txn}), &explicitTxn{txn}, nil } + // implicit transaction txn, err := db.NewTxn(ctx, readOnly) if err != nil { return nil, txn, err @@ -77,3 +86,27 @@ func TryGetContextTxn(ctx context.Context) (datastore.Txn, bool) { func SetContextTxn(ctx context.Context, txn datastore.Txn) context.Context { return context.WithValue(ctx, txnContextKey{}, txn) } + +// GetContextIdentity returns the identity from the given context. +// +// If an identity does not exist `acpIdentity.None` is returned.
+func GetContextIdentity(ctx context.Context) immutable.Option[acpIdentity.Identity] { + identity, ok := ctx.Value(identityContextKey{}).(acpIdentity.Identity) + if ok { + return immutable.Some(identity) + } + return acpIdentity.None +} + +// SetContextIdentity returns a new context with the identity value set. +// +// This will overwrite any previously set identity value. +func SetContextIdentity(ctx context.Context, identity immutable.Option[acpIdentity.Identity]) context.Context { + if identity.HasValue() { + return context.WithValue(ctx, identityContextKey{}, identity.Value()) + } + return context.WithValue(ctx, identityContextKey{}, nil) +} diff --git a/db/db.go b/db/db.go index 327f8e9c9e..613eea0b23 100644 --- a/db/db.go +++ b/db/db.go @@ -155,16 +155,15 @@ func (db *db) LensRegistry() client.LensRegistry { func (db *db) AddPolicy( ctx context.Context, - creator string, policy string, ) (client.AddPolicyResult, error) { if !db.acp.HasValue() { return client.AddPolicyResult{}, client.ErrPolicyAddFailureNoACP } - + identity := GetContextIdentity(ctx) policyID, err := db.acp.Value().AddPolicy( ctx, - creator, + identity.Value().String(), policy, ) diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index db20310768..894361dea4 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -21,6 +21,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -60,7 +61,7 @@ func (s *ExecInfo) Reset() { type Fetcher interface { Init( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, @@ -87,7 +88,7 @@ var ( // DocumentFetcher is a utility to incrementally fetch all the documents. type DocumentFetcher struct { - identity immutable.Option[string] + identity immutable.Option[acpIdentity.Identity] acp immutable.Option[acp.ACP] passedPermissionCheck bool // have valid permission to access @@ -146,7 +147,7 @@ type DocumentFetcher struct { // Init implements DocumentFetcher.
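`GetContextIdentity` and `SetContextIdentity` follow the standard Go context-key idiom: an unexported struct key plus typed accessors, where a failed type assertion maps naturally to `None` (which is why storing `nil` for the no-identity case is safe). A self-contained illustration of the pattern, not DefraDB code:

```go
package main

import (
	"context"
	"fmt"
)

// Unexported struct keys cannot collide with keys set by other packages.
type identityKey struct{}

func withIdentity(ctx context.Context, id string) context.Context {
	return context.WithValue(ctx, identityKey{}, id)
}

func identityFrom(ctx context.Context) (string, bool) {
	id, ok := ctx.Value(identityKey{}).(string) // fails for nil or missing values
	return id, ok
}

func main() {
	ctx := withIdentity(context.Background(), "actor-1")
	fmt.Println(identityFrom(ctx))                  // actor-1 true
	fmt.Println(identityFrom(context.Background())) // "" false
}
```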
func (df *DocumentFetcher) Init( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, @@ -175,7 +176,7 @@ func (df *DocumentFetcher) Init( } func (df *DocumentFetcher) init( - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], acp immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index 84b1cba103..2e776fd55b 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -57,7 +58,7 @@ func NewIndexFetcher( func (f *IndexFetcher) Init( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, diff --git a/db/fetcher/mocks/fetcher.go b/db/fetcher/mocks/fetcher.go index e789032e47..44421230a0 100644 --- a/db/fetcher/mocks/fetcher.go +++ b/db/fetcher/mocks/fetcher.go @@ -14,6 +14,8 @@ import ( fetcher "github.com/sourcenetwork/defradb/db/fetcher" + identity "github.com/sourcenetwork/defradb/acp/identity" + immutable "github.com/sourcenetwork/immutable" mapper "github.com/sourcenetwork/defradb/planner/mapper" @@ -136,13 +138,13 @@ func (_c *Fetcher_FetchNext_Call) RunAndReturn(run func(context.Context) (fetche return _c } -// Init provides a mock function with given fields: ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted -func (_m *Fetcher) Init(ctx context.Context, identity immutable.Option[string], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { - ret := _m.Called(ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) +// Init provides a mock function with given fields: ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted +func (_m *Fetcher) Init(ctx context.Context, id immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { + ret := _m.Called(ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[string], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { - r0 = rf(ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[identity.Identity], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { + r0 = rf(ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) } else { r0 = ret.Error(0) } @@ -157,7 +159,7 @@ type Fetcher_Init_Call struct { // Init is a helper method to define mock.On call // - ctx context.Context -// - identity 
immutable.Option[string] +// - id immutable.Option[identity.Identity] // - txn datastore.Txn // - _a3 immutable.Option[acp.ACP] // - col client.Collection @@ -166,13 +168,13 @@ type Fetcher_Init_Call struct { // - docmapper *core.DocumentMapping // - reverse bool // - showDeleted bool -func (_e *Fetcher_Expecter) Init(ctx interface{}, identity interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { - return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, identity, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} +func (_e *Fetcher_Expecter) Init(ctx interface{}, id interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { + return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} } -func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, identity immutable.Option[string], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, id immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(immutable.Option[string]), args[2].(datastore.Txn), args[3].(immutable.Option[acp.ACP]), args[4].(client.Collection), args[5].([]client.FieldDefinition), args[6].(*mapper.Filter), args[7].(*core.DocumentMapping), args[8].(bool), args[9].(bool)) + run(args[0].(context.Context), args[1].(immutable.Option[identity.Identity]), args[2].(datastore.Txn), args[3].(immutable.Option[acp.ACP]), args[4].(client.Collection), args[5].([]client.FieldDefinition), args[6].(*mapper.Filter), args[7].(*core.DocumentMapping), args[8].(bool), args[9].(bool)) }) return _c } @@ -182,7 +184,7 @@ func (_c *Fetcher_Init_Call) Return(_a0 error) *Fetcher_Init_Call { return _c } -func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Option[string], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) RunAndReturn(run func(context.Context, immutable.Option[identity.Identity], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error) *Fetcher_Init_Call { _c.Call.Return(run) return _c } diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index 16a4515939..096002521c 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -22,6 +22,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" @@ -104,7 +105,7 @@ type VersionedFetcher struct { // Init initializes the VersionedFetcher. 
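For fetcher callers, the only change is the widened identity option; a hedged sketch of adapting a call site (the `df`, `txn`, `acpSys`, `col`, and `fields` values are assumed to exist, and the parameter order follows the mock signature above):

```go
// The zero value of immutable.Option has no value, i.e. "no identity".
var ident immutable.Option[acpIdentity.Identity]
err := df.Init(ctx, ident, txn, acpSys, col, fields,
	nil,   // *mapper.Filter
	nil,   // *core.DocumentMapping
	false, // reverse
	false, // showDeleted
)
```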
func (vf *VersionedFetcher) Init( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 99a4c9ee56..86746a9558 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -48,7 +48,7 @@ type productDoc struct { } func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client.Collection) { - err := col.Create(f.ctx, acpIdentity.NoIdentity, doc) + err := col.Create(f.ctx, doc) require.NoError(f.t, err) f.commitTxn() f.txn, err = f.db.NewTxn(f.ctx, false) @@ -324,7 +324,7 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) + err := f.users.Create(ctx, doc) require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) } @@ -342,7 +342,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { doc, err := client.NewDocFromJSON(data, f.users.Schema()) require.NoError(f.t, err) - err = f.users.Create(f.ctx, acpIdentity.NoIdentity, doc) + err = f.users.Create(f.ctx, doc) require.NoError(f.t, err) key := newIndexKeyBuilder(f).Col(usersColName).Build() @@ -363,7 +363,7 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) + err := f.users.Create(ctx, doc) assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) } @@ -382,7 +382,7 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { Return(nil, testErr) ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, acpIdentity.NoIdentity, doc) + err := f.users.Create(ctx, doc) require.ErrorIs(t, err, testErr) } @@ -415,9 +415,9 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t userDoc := f.newUserDoc("John", 21, users) prodDoc := f.newProdDoc(1, 3, "games", products) - err = users.Create(f.ctx, acpIdentity.NoIdentity, userDoc) + err = users.Create(f.ctx, userDoc) require.NoError(f.t, err) - err = products.Create(f.ctx, acpIdentity.NoIdentity, prodDoc) + err = products.Create(f.ctx, prodDoc) require.NoError(f.t, err) f.commitTxn() @@ -749,14 +749,14 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { Name: "update", NewValue: "Islam", Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Update(f.ctx, doc) }, }, { Name: "save", NewValue: "Andy", Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Save(f.ctx, doc) }, }, } @@ -812,7 +812,7 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) return fetcherMocks.NewStubbedFetcher(t) } ctx = SetContextTxn(f.ctx, mockedTxn) - err = usersCol.Update(ctx, acpIdentity.NoIdentity, doc) + err = usersCol.Update(ctx, doc) require.ErrorIs(t, err, testErr) } @@ -908,7 +908,7 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { err := doc.Set(usersNameFieldName, "Islam") require.NoError(t, err, tc.Name) - err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + err = f.users.Update(f.ctx, doc) require.Error(t, err, tc.Name) 
newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build() @@ -936,7 +936,7 @@ func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) { err = doc.Set(usersAgeFieldName, 23) require.NoError(t, err) - err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + err = f.users.Update(f.ctx, doc) require.ErrorIs(t, err, ErrCorruptedIndex) } @@ -974,7 +974,7 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { ). RunAndReturn(func( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, @@ -996,7 +996,7 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { err := doc.Set(usersNameFieldName, "Islam") require.NoError(t, err) - _ = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + _ = f.users.Update(f.ctx, doc) } func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { @@ -1055,7 +1055,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { mockedTxn.EXPECT().Datastore().Return(mockedTxn.MockDatastore).Maybe() ctx := SetContextTxn(f.ctx, mockedTxn) - err = f.users.Update(ctx, acpIdentity.NoIdentity, doc) + err = f.users.Update(ctx, doc) require.ErrorIs(t, err, testErr) } } @@ -1080,7 +1080,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { err = doc.Set(usersNameFieldName, "John") require.NoError(f.t, err) - err = f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + err = f.users.Update(f.ctx, doc) require.NoError(f.t, err) f.commitTxn() @@ -1204,14 +1204,14 @@ func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { Name: "update", NewValue: "Islam", Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Update(f.ctx, doc) }, }, { Name: "save", NewValue: "Andy", Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Save(f.ctx, doc) }, }, } @@ -1347,7 +1347,7 @@ func TestUniqueComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T newKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName, usersAgeFieldName). 
Doc(doc).Unique().Build() - require.NoError(t, f.users.Update(f.ctx, acpIdentity.NoIdentity, doc), tc.Name) + require.NoError(t, f.users.Update(f.ctx, doc), tc.Name) f.commitTxn() _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) @@ -1396,7 +1396,7 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { NewValue: "Islam", Field: usersNameFieldName, Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Update(f.ctx, doc) }, }, { @@ -1404,7 +1404,7 @@ NewValue: "Andy", Field: usersNameFieldName, Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Save(f.ctx, doc) }, }, { @@ -1412,7 +1412,7 @@ NewValue: 33, Field: usersAgeFieldName, Exec: func(doc *client.Document) error { - return f.users.Update(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Update(f.ctx, doc) }, }, { @@ -1420,7 +1420,7 @@ NewValue: 36, Field: usersAgeFieldName, Exec: func(doc *client.Document) error { - return f.users.Save(f.ctx, acpIdentity.NoIdentity, doc) + return f.users.Save(f.ctx, doc) }, }, } diff --git a/db/permission/check.go b/db/permission/check.go index b62b9c384b..36dce10489 100644 --- a/db/permission/check.go +++ b/db/permission/check.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -34,7 +35,7 @@ import ( // - Document is public (unregistered), whether the request is signed or not doesn't matter. func CheckAccessOfDocOnCollectionWithACP( ctx context.Context, - identityOptional immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], acpSystem acp.ACP, collection client.Collection, permission acp.DPIPermission, @@ -69,7 +70,7 @@ // At this point if the request is not signed, then it has no access, because: // the collection has a policy on it, and the acp is enabled/available, // and the document is not public (is registered with acp). - if !identityOptional.HasValue() { + if !identity.HasValue() { return false, nil } @@ -77,7 +78,7 @@ hasAccess, err := acpSystem.CheckDocAccess( ctx, permission, - identityOptional.Value(), + identity.Value().String(), policyID, resourceName, docID, diff --git a/db/permission/register.go b/db/permission/register.go index b638a015db..a46e5eef34 100644 --- a/db/permission/register.go +++ b/db/permission/register.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -30,7 +31,7 @@ import ( // Otherwise, nothing is registered with acp.
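Condensing the comments in `check.go`, the access decision runs in a fixed order. The sketch below is a paraphrase written under the assumption that the elided parts of `CheckAccessOfDocOnCollectionWithACP` behave as its comments describe; it is not the verbatim function:

```go
func accessDecision(hasPolicy, isRegistered bool, identity immutable.Option[acpIdentity.Identity]) string {
	switch {
	case !hasPolicy:
		return "allow: collection has no policy, document is public"
	case !isRegistered:
		return "allow: document never registered with acp, still public"
	case !identity.HasValue():
		return "deny: policy applies and the request is not signed"
	default:
		return "defer: acpSystem.CheckDocAccess decides"
	}
}
```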
func RegisterDocOnCollectionWithACP( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], acpSystem acp.ACP, collection client.Collection, docID string, @@ -39,7 +40,7 @@ func RegisterDocOnCollectionWithACP( if policyID, resourceName, hasPolicy := isPermissioned(collection); hasPolicy && identity.HasValue() { return acpSystem.RegisterDocObject( ctx, - identity.Value(), + identity.Value().String(), policyID, resourceName, docID, diff --git a/db/request.go b/db/request.go index 099f8852ed..83a2fb09bb 100644 --- a/db/request.go +++ b/db/request.go @@ -13,18 +13,12 @@ package db import ( "context" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/planner" ) // execRequest executes a request against the database. -func (db *db) execRequest( - ctx context.Context, - identity immutable.Option[string], - request string, -) *client.RequestResult { +func (db *db) execRequest(ctx context.Context, request string) *client.RequestResult { res := &client.RequestResult{} ast, err := db.parser.BuildRequestAST(request) if err != nil { @@ -49,11 +43,12 @@ func (db *db) execRequest( if pub != nil { res.Pub = pub - go db.handleSubscription(ctx, identity, pub, subRequest) + go db.handleSubscription(ctx, pub, subRequest) return res } txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) planner := planner.New( ctx, identity, diff --git a/db/store.go b/db/store.go index 5a3f3f7ad6..1686b9af3e 100644 --- a/db/store.go +++ b/db/store.go @@ -21,11 +21,7 @@ import ( ) // ExecRequest executes a request against the database. -func (db *db) ExecRequest( - ctx context.Context, - identity immutable.Option[string], - request string, -) *client.RequestResult { +func (db *db) ExecRequest(ctx context.Context, request string) *client.RequestResult { ctx, txn, err := ensureContextTxn(ctx, db, false) if err != nil { res := &client.RequestResult{} @@ -34,7 +30,7 @@ func (db *db) ExecRequest( } defer txn.Discard(ctx) - res := db.execRequest(ctx, identity, request) + res := db.execRequest(ctx, request) if len(res.GQL.Errors) > 0 { return res } diff --git a/db/subscriptions.go b/db/subscriptions.go index a8c8f5bb42..0d16074887 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -13,8 +13,6 @@ package db import ( "context" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/events" @@ -50,7 +48,6 @@ func (db *db) checkForClientSubscriptions(r *request.Request) ( func (db *db) handleSubscription( ctx context.Context, - identity immutable.Option[string], pub *events.Publisher[events.Update], r *request.ObjectSubscription, ) { @@ -62,19 +59,19 @@ func (db *db) handleSubscription( } ctx := SetContextTxn(ctx, txn) - db.handleEvent(ctx, identity, pub, evt, r) + db.handleEvent(ctx, pub, evt, r) txn.Discard(ctx) } } func (db *db) handleEvent( ctx context.Context, - identity immutable.Option[string], pub *events.Publisher[events.Update], evt events.Update, r *request.ObjectSubscription, ) { txn := mustGetContextTxn(ctx) + identity := GetContextIdentity(ctx) p := planner.New( ctx, identity, diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index 1b5532ac9f..e799d5b4f7 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -10,8 +10,9 @@ Execute queries, add schema types, obtain node info, etc. 
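Request execution and subscriptions now draw the identity from the context as well; a hedged sketch of the resulting call shape, before the regenerated CLI docs below (the `ident` value is assumed to exist):

```go
ctx = SetContextIdentity(ctx, immutable.Some(ident))
res := db.ExecRequest(ctx, `query { Users { name } }`)
if len(res.GQL.Errors) > 0 {
	// handle request errors
}
```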
### Options ``` - -h, --help help for client - --tx uint Transaction ID + -h, --help help for client + -i, --identity string ACP Identity + --tx uint Transaction ID ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_acp.md b/docs/cli/defradb_client_acp.md index ab4ac22d6d..43d1cae8ac 100644 --- a/docs/cli/defradb_client_acp.md +++ b/docs/cli/defradb_client_acp.md @@ -20,6 +20,7 @@ Learn more about [ACP](/acp/README.md) ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_acp_policy.md b/docs/cli/defradb_client_acp_policy.md index ef6d02e3dc..54445062c1 100644 --- a/docs/cli/defradb_client_acp_policy.md +++ b/docs/cli/defradb_client_acp_policy.md @@ -16,6 +16,7 @@ Interact with the acp policy features of DefraDB instance ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_acp_policy_add.md b/docs/cli/defradb_client_acp_policy_add.md index 322842a962..65f3aa68d8 100644 --- a/docs/cli/defradb_client_acp_policy_add.md +++ b/docs/cli/defradb_client_acp_policy_add.md @@ -56,15 +56,15 @@ defradb client acp policy add [-i --identity] [policy] [flags] ### Options ``` - -f, --file string File to load a policy from - -h, --help help for add - -i, --identity string [Required] Identity of the creator + -f, --file string File to load a policy from + -h, --help help for add ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_backup.md b/docs/cli/defradb_client_backup.md index c8b48818b9..6fdd82b1cf 100644 --- a/docs/cli/defradb_client_backup.md +++ b/docs/cli/defradb_client_backup.md @@ -17,6 +17,7 @@ Currently only supports JSON format. ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_backup_export.md b/docs/cli/defradb_client_backup_export.md index c47e46de3f..e950883096 100644 --- a/docs/cli/defradb_client_backup_export.md +++ b/docs/cli/defradb_client_backup_export.md @@ -31,6 +31,7 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index c33b9970bf..94c5f5975a 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -9,12 +9,13 @@ Create, read, update, and delete documents within a collection. ### Options ``` - --get-inactive Get inactive collections as well as active - -h, --help help for collection - --name string Collection name - --schema string Collection schema Root - --tx uint Transaction ID - --version string Collection version ID + --get-inactive Get inactive collections as well as active + -h, --help help for collection + -i, --identity string ACP Identity + --name string Collection name + --schema string Collection schema Root + --tx uint Transaction ID + --version string Collection version ID ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_collection_create.md b/docs/cli/defradb_client_collection_create.md index 492bb6a060..daaaa2902f 100644 --- a/docs/cli/defradb_client_collection_create.md +++ b/docs/cli/defradb_client_collection_create.md @@ -29,9 +29,8 @@ defradb client collection create [-i --identity] [flags] ### Options ``` - -f, --file string File containing document(s) - -h, --help help for create - -i, --identity string Identity of the actor + -f, --file string File containing document(s) + -h, --help help for create ``` ### Options inherited from parent commands @@ -39,6 +38,7 @@ defradb client collection create [-i --identity] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index ce2f6ff8ab..f3c678e857 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -23,10 +23,9 @@ defradb client collection delete [-i --identity] [--filter --docID --docID [flags] ### Options ``` - -h, --help help for get - -i, --identity string Identity of the actor - --show-deleted Show deleted documents + -h, --help help for get + --show-deleted Show deleted documents ``` ### Options inherited from parent commands @@ -30,6 +29,7 @@ defradb client collection get [-i --identity] [--show-deleted] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_collection_patch.md b/docs/cli/defradb_client_collection_patch.md index a5d0179a41..25cff2d710 100644 --- a/docs/cli/defradb_client_collection_patch.md +++ b/docs/cli/defradb_client_collection_patch.md @@ -35,6 +35,7 @@ defradb client collection patch [patch] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests --get-inactive Get inactive collections as well as active + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index dd2d1864c3..201643b835 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -29,11 +29,10 @@ defradb client collection update [-i --identity] [--filter --docID --docID --fields [-n - ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_index_drop.md b/docs/cli/defradb_client_index_drop.md index 9659958a28..7e91607c3a 100644 --- a/docs/cli/defradb_client_index_drop.md +++ b/docs/cli/defradb_client_index_drop.md @@ -25,6 +25,7 @@ defradb client index drop -c --collection -n --name [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_index_list.md b/docs/cli/defradb_client_index_list.md index ee00938d6e..563b622cde 100644 --- a/docs/cli/defradb_client_index_list.md +++ b/docs/cli/defradb_client_index_list.md @@ -27,6 +27,7 @@ defradb client index list [-c --collection ] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p.md b/docs/cli/defradb_client_p2p.md index 020506b92f..5ac781aa5c 100644 --- a/docs/cli/defradb_client_p2p.md +++ b/docs/cli/defradb_client_p2p.md @@ -16,6 +16,7 @@ Interact with the DefraDB P2P system ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_collection.md b/docs/cli/defradb_client_p2p_collection.md index 873362f041..f664c94414 100644 --- a/docs/cli/defradb_client_p2p_collection.md +++ b/docs/cli/defradb_client_p2p_collection.md @@ -17,6 +17,7 @@ The selected collections synchronize their events on the pubsub network. ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_collection_add.md b/docs/cli/defradb_client_p2p_collection_add.md index 6fa00a5673..5a45967831 100644 --- a/docs/cli/defradb_client_p2p_collection_add.md +++ b/docs/cli/defradb_client_p2p_collection_add.md @@ -28,6 +28,7 @@ defradb client p2p collection add [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_collection_getall.md b/docs/cli/defradb_client_p2p_collection_getall.md index e2946022cb..66b8f919c6 100644 --- a/docs/cli/defradb_client_p2p_collection_getall.md +++ b/docs/cli/defradb_client_p2p_collection_getall.md @@ -21,6 +21,7 @@ defradb client p2p collection getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_p2p_collection_remove.md b/docs/cli/defradb_client_p2p_collection_remove.md index da09bdf70f..e8f8320222 100644 --- a/docs/cli/defradb_client_p2p_collection_remove.md +++ b/docs/cli/defradb_client_p2p_collection_remove.md @@ -28,6 +28,7 @@ defradb client p2p collection remove [collectionIDs] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_info.md b/docs/cli/defradb_client_p2p_info.md index 809d84fb6b..7a84f1152f 100644 --- a/docs/cli/defradb_client_p2p_info.md +++ b/docs/cli/defradb_client_p2p_info.md @@ -20,6 +20,7 @@ defradb client p2p info [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md index 8b5dc88fdb..3dbe42b935 100644 --- a/docs/cli/defradb_client_p2p_replicator.md +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -17,6 +17,7 @@ A replicator replicates one or all collection(s) from one node to another. ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md index c5fded6a5b..88c68fa6b4 100644 --- a/docs/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -26,6 +26,7 @@ defradb client p2p replicator delete [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md index 9f983de9fa..38c015f24d 100644 --- a/docs/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -25,6 +25,7 @@ defradb client p2p replicator getall [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md index 0f3446c87b..24e514250a 100644 --- a/docs/cli/defradb_client_p2p_replicator_set.md +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -26,6 +26,7 @@ defradb client p2p replicator set [-c, --collection] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_query.md b/docs/cli/defradb_client_query.md index cd320e9de6..cd53fcaf48 100644 --- a/docs/cli/defradb_client_query.md +++ b/docs/cli/defradb_client_query.md @@ -30,15 +30,15 @@ defradb client query [-i --identity] [request] [flags] ### Options ``` - -f, --file string File containing the query request - -h, --help help for query - -i, --identity string Identity of the actor + -f, --file string File containing the query request + -h, --help help for query ``` ### Options inherited from parent commands ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index 2e5a7db88c..569d8e95f9 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -16,6 +16,7 @@ Make changes, updates, or look for existing schema types. ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_add.md b/docs/cli/defradb_client_schema_add.md index dc72a6a354..e425110a1b 100644 --- a/docs/cli/defradb_client_schema_add.md +++ b/docs/cli/defradb_client_schema_add.md @@ -37,6 +37,7 @@ defradb client schema add [schema] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_describe.md b/docs/cli/defradb_client_schema_describe.md index 3ab0c1dda8..cf94bed188 100644 --- a/docs/cli/defradb_client_schema_describe.md +++ b/docs/cli/defradb_client_schema_describe.md @@ -36,6 +36,7 @@ defradb client schema describe [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. 
Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration.md b/docs/cli/defradb_client_schema_migration.md index 2ee26c8521..3f4640a672 100644 --- a/docs/cli/defradb_client_schema_migration.md +++ b/docs/cli/defradb_client_schema_migration.md @@ -16,6 +16,7 @@ Make set or look for existing schema migrations on a DefraDB node. ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration_down.md b/docs/cli/defradb_client_schema_migration_down.md index e5541396f6..72cd07b96e 100644 --- a/docs/cli/defradb_client_schema_migration_down.md +++ b/docs/cli/defradb_client_schema_migration_down.md @@ -33,6 +33,7 @@ defradb client schema migration down --collection [fl ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration_reload.md b/docs/cli/defradb_client_schema_migration_reload.md index ef89b749e8..8e56cd1a0c 100644 --- a/docs/cli/defradb_client_schema_migration_reload.md +++ b/docs/cli/defradb_client_schema_migration_reload.md @@ -20,6 +20,7 @@ defradb client schema migration reload [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration_set-registry.md b/docs/cli/defradb_client_schema_migration_set-registry.md index 2eae5aba48..cf0b117b30 100644 --- a/docs/cli/defradb_client_schema_migration_set-registry.md +++ b/docs/cli/defradb_client_schema_migration_set-registry.md @@ -26,6 +26,7 @@ defradb client schema migration set-registry [collectionID] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration_set.md b/docs/cli/defradb_client_schema_migration_set.md index 1a5f923218..135e39308b 100644 --- a/docs/cli/defradb_client_schema_migration_set.md +++ b/docs/cli/defradb_client_schema_migration_set.md @@ -33,6 +33,7 @@ defradb client schema migration set [src] [dst] [cfg] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. 
Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_migration_up.md b/docs/cli/defradb_client_schema_migration_up.md index 62fd063cd4..863d033f91 100644 --- a/docs/cli/defradb_client_schema_migration_up.md +++ b/docs/cli/defradb_client_schema_migration_up.md @@ -33,6 +33,7 @@ defradb client schema migration up --collection [flag ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_patch.md b/docs/cli/defradb_client_schema_patch.md index 393786eb05..796604ff4b 100644 --- a/docs/cli/defradb_client_schema_patch.md +++ b/docs/cli/defradb_client_schema_patch.md @@ -36,6 +36,7 @@ defradb client schema patch [schema] [migration] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_schema_set-active.md b/docs/cli/defradb_client_schema_set-active.md index 77ec843f9c..909e54a925 100644 --- a/docs/cli/defradb_client_schema_set-active.md +++ b/docs/cli/defradb_client_schema_set-active.md @@ -21,6 +21,7 @@ defradb client schema set-active [versionID] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_tx.md b/docs/cli/defradb_client_tx.md index f3007f574e..f1e38f6b6c 100644 --- a/docs/cli/defradb_client_tx.md +++ b/docs/cli/defradb_client_tx.md @@ -16,6 +16,7 @@ Create, commit, and discard DefraDB transactions ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_tx_commit.md b/docs/cli/defradb_client_tx_commit.md index e9ae2e529d..536fdd1284 100644 --- a/docs/cli/defradb_client_tx_commit.md +++ b/docs/cli/defradb_client_tx_commit.md @@ -20,6 +20,7 @@ defradb client tx commit [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. 
(default "stderr") diff --git a/docs/cli/defradb_client_tx_create.md b/docs/cli/defradb_client_tx_create.md index a2c45a9b44..7a986e416a 100644 --- a/docs/cli/defradb_client_tx_create.md +++ b/docs/cli/defradb_client_tx_create.md @@ -22,6 +22,7 @@ defradb client tx create [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_tx_discard.md b/docs/cli/defradb_client_tx_discard.md index 2b1b4badb8..fc0bb2a8db 100644 --- a/docs/cli/defradb_client_tx_discard.md +++ b/docs/cli/defradb_client_tx_discard.md @@ -20,6 +20,7 @@ defradb client tx discard [id] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_view.md b/docs/cli/defradb_client_view.md index 8b8f47e8bc..3c77088c29 100644 --- a/docs/cli/defradb_client_view.md +++ b/docs/cli/defradb_client_view.md @@ -16,6 +16,7 @@ Manage (add) views withing a running DefraDB instance ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/docs/cli/defradb_client_view_add.md b/docs/cli/defradb_client_view_add.md index e522b86f1b..1e7b94f2b8 100644 --- a/docs/cli/defradb_client_view_add.md +++ b/docs/cli/defradb_client_view_add.md @@ -26,6 +26,7 @@ defradb client view add [query] [sdl] [transform] [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") --log-output string Log output path. Options are stderr or stdout. (default "stderr") diff --git a/http/client.go b/http/client.go index 8837ce2e2d..4eaadfd2d0 100644 --- a/http/client.go +++ b/http/client.go @@ -339,7 +339,6 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ func (c *Client) ExecRequest( ctx context.Context, - identity immutable.Option[string], query string, ) *client.RequestResult { methodURL := c.http.baseURL.JoinPath("graphql") @@ -356,7 +355,6 @@ func (c *Client) ExecRequest( return result } c.http.setDefaultHeaders(req) - addIdentityToAuthHeaderIfExists(req, identity) res, err := c.http.client.Do(req) if err != nil { diff --git a/http/client_acp.go b/http/client_acp.go index bdd9e6ed3b..a0140cf437 100644 --- a/http/client_acp.go +++ b/http/client_acp.go @@ -11,45 +11,26 @@ package http import ( - "bytes" "context" - "encoding/json" "net/http" + "strings" "github.com/sourcenetwork/defradb/client" ) -// AddPolicyResult wraps the result of successfully adding/registering a Policy. 
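From `http/client.go` onward, the recurring Go-side change matches what the docs above reflect: the explicit identity arguments (`identity immutable.Option[string]`, and `creator string` on `AddPolicy`) disappear from `ExecRequest` and every `Collection` method, and the acting identity instead rides on the `context.Context`. A minimal sketch of the new calling convention, built from the `db.SetContextIdentity` and `acpIdentity.New` helpers this patch introduces (the helper name is hypothetical; the actor address is the one used in the tests below, and the query is illustrative):

```go
package sketch

import (
	"context"

	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/db"
)

// createAndQueryAsActor sketches the post-patch convention: the identity
// is bound to the context once, and every store/collection call made
// with that context acts as that identity.
func createAndQueryAsActor(
	ctx context.Context,
	store client.Store,
	col client.Collection,
	doc *client.Document,
) (*client.RequestResult, error) {
	actor := acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")
	ctx = db.SetContextIdentity(ctx, actor)

	// Was: col.Create(ctx, identity, doc)
	if err := col.Create(ctx, doc); err != nil {
		return nil, err
	}
	// Was: store.ExecRequest(ctx, identity, query)
	return store.ExecRequest(ctx, `query { User { name } }`), nil
}
```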
-type AddPolicyRequest struct { - // Policy body in JSON or YAML format. - Policy string `json:"policy"` -} - func (c *Client) AddPolicy( ctx context.Context, - creator string, policy string, ) (client.AddPolicyResult, error) { methodURL := c.http.baseURL.JoinPath("acp", "policy") - addPolicyRequest := AddPolicyRequest{ - Policy: policy, - } - - addPolicyBody, err := json.Marshal(addPolicyRequest) - if err != nil { - return client.AddPolicyResult{}, err - } - req, err := http.NewRequestWithContext( ctx, http.MethodPost, methodURL.String(), - bytes.NewBuffer(addPolicyBody), + strings.NewReader(policy), ) - addIdentityToAuthHeader(req, creator) - if err != nil { return client.AddPolicyResult{}, err } diff --git a/http/client_collection.go b/http/client_collection.go index 39ede6aafc..d91601fbeb 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -61,7 +61,6 @@ func (c *Collection) Definition() client.CollectionDefinition { func (c *Collection) Create( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { if !c.Description().Name.HasValue() { @@ -80,8 +79,6 @@ func (c *Collection) Create( return err } - addIdentityToAuthHeaderIfExists(req, identity) - _, err = c.http.request(req) if err != nil { return err @@ -92,7 +89,6 @@ func (c *Collection) Create( func (c *Collection) CreateMany( ctx context.Context, - identity immutable.Option[string], docs []*client.Document, ) error { if !c.Description().Name.HasValue() { @@ -119,8 +115,6 @@ func (c *Collection) CreateMany( return err } - addIdentityToAuthHeaderIfExists(req, identity) - _, err = c.http.request(req) if err != nil { return err @@ -134,7 +128,6 @@ func (c *Collection) CreateMany( func (c *Collection) Update( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { if !c.Description().Name.HasValue() { @@ -152,8 +145,6 @@ func (c *Collection) Update( return err } - addIdentityToAuthHeaderIfExists(req, identity) - _, err = c.http.request(req) if err != nil { return err @@ -164,22 +155,20 @@ func (c *Collection) Update( func (c *Collection) Save( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { - _, err := c.Get(ctx, identity, doc.ID(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { - return c.Update(ctx, identity, doc) + return c.Update(ctx, doc) } if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { - return c.Create(ctx, identity, doc) + return c.Create(ctx, doc) } return err } func (c *Collection) Delete( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (bool, error) { if !c.Description().Name.HasValue() { @@ -193,8 +182,6 @@ func (c *Collection) Delete( return false, err } - addIdentityToAuthHeaderIfExists(req, identity) - _, err = c.http.request(req) if err != nil { return false, err @@ -204,10 +191,9 @@ func (c *Collection) Delete( func (c *Collection) Exists( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (bool, error) { - _, err := c.Get(ctx, identity, docID, false) + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -216,17 +202,16 @@ func (c *Collection) Exists( func (c *Collection) UpdateWith( ctx context.Context, - identity immutable.Option[string], target any, updater string, ) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, identity, t, updater) + return c.UpdateWithFilter(ctx, t, updater) case 
client.DocID: - return c.UpdateWithDocID(ctx, identity, t, updater) + return c.UpdateWithDocID(ctx, t, updater) case []client.DocID: - return c.UpdateWithDocIDs(ctx, identity, t, updater) + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -234,7 +219,6 @@ func (c *Collection) UpdateWith( func (c *Collection) updateWith( ctx context.Context, - identity immutable.Option[string], request CollectionUpdateRequest, ) (*client.UpdateResult, error) { if !c.Description().Name.HasValue() { @@ -252,8 +236,6 @@ func (c *Collection) updateWith( return nil, err } - addIdentityToAuthHeaderIfExists(req, identity) - var result client.UpdateResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -263,13 +245,11 @@ func (c *Collection) updateWith( func (c *Collection) UpdateWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, updater string, ) (*client.UpdateResult, error) { return c.updateWith( ctx, - identity, CollectionUpdateRequest{ Filter: filter, Updater: updater, @@ -279,13 +259,11 @@ func (c *Collection) UpdateWithFilter( func (c *Collection) UpdateWithDocID( ctx context.Context, - identity immutable.Option[string], docID client.DocID, updater string, ) (*client.UpdateResult, error) { return c.updateWith( ctx, - identity, CollectionUpdateRequest{ DocID: docID.String(), Updater: updater, @@ -295,7 +273,6 @@ func (c *Collection) UpdateWithDocID( func (c *Collection) UpdateWithDocIDs( ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -305,7 +282,6 @@ func (c *Collection) UpdateWithDocIDs( } return c.updateWith( ctx, - identity, CollectionUpdateRequest{ DocIDs: strDocIDs, Updater: updater, @@ -315,16 +291,15 @@ func (c *Collection) UpdateWithDocIDs( func (c *Collection) DeleteWith( ctx context.Context, - identity immutable.Option[string], target any, ) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, identity, t) + return c.DeleteWithFilter(ctx, t) case client.DocID: - return c.DeleteWithDocID(ctx, identity, t) + return c.DeleteWithDocID(ctx, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, identity, t) + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -332,7 +307,6 @@ func (c *Collection) DeleteWith( func (c *Collection) deleteWith( ctx context.Context, - identity immutable.Option[string], request CollectionDeleteRequest, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { @@ -351,8 +325,6 @@ func (c *Collection) deleteWith( return nil, err } - addIdentityToAuthHeaderIfExists(req, identity) - var result client.DeleteResult if err := c.http.requestJson(req, &result); err != nil { return nil, err @@ -362,12 +334,10 @@ func (c *Collection) deleteWith( func (c *Collection) DeleteWithFilter( ctx context.Context, - identity immutable.Option[string], filter any, ) (*client.DeleteResult, error) { return c.deleteWith( ctx, - identity, CollectionDeleteRequest{ Filter: filter, }, @@ -376,12 +346,10 @@ func (c *Collection) DeleteWithFilter( func (c *Collection) DeleteWithDocID( ctx context.Context, - identity immutable.Option[string], docID client.DocID, ) (*client.DeleteResult, error) { return c.deleteWith( ctx, - identity, CollectionDeleteRequest{ DocID: docID.String(), }, @@ -390,7 +358,6 @@ func (c *Collection) DeleteWithDocID( func (c *Collection) DeleteWithDocIDs( 
ctx context.Context, - identity immutable.Option[string], docIDs []client.DocID, ) (*client.DeleteResult, error) { var strDocIDs []string @@ -399,7 +366,6 @@ func (c *Collection) DeleteWithDocIDs( } return c.deleteWith( ctx, - identity, CollectionDeleteRequest{ DocIDs: strDocIDs, }, @@ -408,7 +374,6 @@ func (c *Collection) DeleteWithDocIDs( func (c *Collection) Get( ctx context.Context, - identity immutable.Option[string], docID client.DocID, showDeleted bool, ) (*client.Document, error) { @@ -429,8 +394,6 @@ func (c *Collection) Get( return nil, err } - addIdentityToAuthHeaderIfExists(req, identity) - data, err := c.http.request(req) if err != nil { return nil, err @@ -446,7 +409,6 @@ func (c *Collection) Get( func (c *Collection) GetAllDocIDs( ctx context.Context, - identity immutable.Option[string], ) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols @@ -461,8 +423,6 @@ func (c *Collection) GetAllDocIDs( c.http.setDefaultHeaders(req) - addIdentityToAuthHeaderIfExists(req, identity) - res, err := c.http.client.Do(req) if err != nil { return nil, err diff --git a/http/handler.go b/http/handler.go index e6d83dbdd3..80afcc5a3e 100644 --- a/http/handler.go +++ b/http/handler.go @@ -81,6 +81,7 @@ func NewHandler(db client.DB) (*Handler, error) { r.Use( ApiMiddleware(db, txs), TransactionMiddleware, + IdentityMiddleware, ) r.Handle("/*", router) }) diff --git a/http/handler_acp.go b/http/handler_acp.go index b754223c91..c3c5985c71 100644 --- a/http/handler_acp.go +++ b/http/handler_acp.go @@ -11,11 +11,11 @@ package http import ( + "io" "net/http" "github.com/getkin/kin-openapi/openapi3" - "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" ) @@ -28,22 +28,15 @@ func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) { return } - var addPolicyRequest AddPolicyRequest - if err := requestJSON(req, &addPolicyRequest); err != nil { + policyBytes, err := io.ReadAll(req.Body) + if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - identity := getIdentityFromAuthHeader(req) - if !identity.HasValue() { - responseJSON(rw, http.StatusBadRequest, errorResponse{acp.ErrPolicyCreatorMustNotBeEmpty}) - return - } - addPolicyResult, err := db.AddPolicy( req.Context(), - identity.Value(), - addPolicyRequest.Policy, + string(policyBytes), ) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) @@ -60,13 +53,10 @@ func (h *acpHandler) bindRoutes(router *Router) { errorResponse := &openapi3.ResponseRef{ Ref: "#/components/responses/error", } - acpAddPolicySchema := &openapi3.SchemaRef{ - Ref: "#/components/schemas/add_policy_request", - } acpAddPolicyRequest := openapi3.NewRequestBody(). WithRequired(true). 
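With the `AddPolicyRequest` envelope removed, the policy document itself is now the request body (described in OpenAPI as `text/plain` rather than a JSON schema), and the creator is no longer a body field at all: it arrives in the Authorization header that the new `IdentityMiddleware` further down consumes. A sketch of the resulting wire shape, using only the endpoint path segments and header scheme this patch defines (the helper name, base URL, and identity string are illustrative):

```go
package sketch

import (
	"context"
	"net/http"
	"strings"
)

// addPolicyRaw mirrors the wire shape of the new AddPolicy client call:
// POST the policy text verbatim, with the identity in the auth header.
func addPolicyRaw(ctx context.Context, baseURL, identity, policy string) (*http.Response, error) {
	req, err := http.NewRequestWithContext(
		ctx,
		http.MethodPost,
		baseURL+"/acp/policy",     // the client joins these segments onto its base URL
		strings.NewReader(policy), // raw YAML/JSON policy text, no JSON wrapper
	)
	if err != nil {
		return nil, err
	}
	// "Basic " is the interim scheme; the patch notes it will become
	// "Bearer" once ACP authentication is implemented (#2017).
	req.Header.Add("Authorization", "Basic "+identity)
	return http.DefaultClient.Do(req)
}
```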
- WithJSONSchemaRef(acpAddPolicySchema) + WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) acpAddPolicy := openapi3.NewOperation() acpAddPolicy.OperationID = "add policy" diff --git a/http/handler_ccip.go b/http/handler_ccip.go index dfe8a66083..01597377e2 100644 --- a/http/handler_ccip.go +++ b/http/handler_ccip.go @@ -60,8 +60,7 @@ func (c *ccipHandler) ExecCCIP(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - result := store.ExecRequest(req.Context(), identity, request.Query) + result := store.ExecRequest(req.Context(), request.Query) if result.Pub != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) return diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go index 37e0da951f..2a2cc4f077 100644 --- a/http/handler_ccip_test.go +++ b/http/handler_ccip_test.go @@ -25,7 +25,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" @@ -207,7 +206,7 @@ func setupDatabase(t *testing.T) client.DB { doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) return cdb diff --git a/http/handler_collection.go b/http/handler_collection.go index 8b7f0cf64c..745a05740b 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -47,8 +47,6 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - switch { case client.IsJSONArray(data): docList, err := client.NewDocsFromJSON(data, col.Schema()) @@ -57,7 +55,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { return } - if err := col.CreateMany(req.Context(), identity, docList); err != nil { + if err := col.CreateMany(req.Context(), docList); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -68,7 +66,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - if err := col.Create(req.Context(), identity, doc); err != nil { + if err := col.Create(req.Context(), doc); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -85,11 +83,9 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request return } - identity := getIdentityFromAuthHeader(req) - switch { case request.Filter != nil: - result, err := col.DeleteWith(req.Context(), identity, request.Filter) + result, err := col.DeleteWith(req.Context(), request.Filter) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -101,7 +97,7 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.DeleteWith(req.Context(), identity, docID) + result, err := col.DeleteWith(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -117,7 +113,7 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request } docIDs = append(docIDs, docID) } - result, err := 
col.DeleteWith(req.Context(), identity, docIDs) + result, err := col.DeleteWith(req.Context(), docIDs) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -137,11 +133,9 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request return } - identity := getIdentityFromAuthHeader(req) - switch { case request.Filter != nil: - result, err := col.UpdateWith(req.Context(), identity, request.Filter, request.Updater) + result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -153,7 +147,7 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.UpdateWith(req.Context(), identity, docID, request.Updater) + result, err := col.UpdateWith(req.Context(), docID, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -169,7 +163,7 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request } docIDs = append(docIDs, docID) } - result, err := col.UpdateWith(req.Context(), identity, docIDs, request.Updater) + result, err := col.UpdateWith(req.Context(), docIDs, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -189,9 +183,7 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - - doc, err := col.Get(req.Context(), identity, docID, true) + doc, err := col.Get(req.Context(), docID, true) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -211,7 +203,7 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - err = col.Update(req.Context(), identity, doc) + err = col.Update(req.Context(), doc) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -228,9 +220,7 @@ func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - - _, err = col.Delete(req.Context(), identity, docID) + _, err = col.Delete(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -248,9 +238,7 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - - doc, err := col.Get(req.Context(), identity, docID, showDeleted) + doc, err := col.Get(req.Context(), docID, showDeleted) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -283,9 +271,7 @@ func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Reque return } - identity := getIdentityFromAuthHeader(req) - - docIDsResult, err := col.GetAllDocIDs(req.Context(), identity) + docIDsResult, err := col.GetAllDocIDs(req.Context()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/http/handler_store.go b/http/handler_store.go index c71e108818..521aa13775 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -312,8 +312,7 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { return } - identity := getIdentityFromAuthHeader(req) - result := store.ExecRequest(req.Context(), identity, request.Query) + result := 
store.ExecRequest(req.Context(), request.Query) if result.Pub == nil { responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) diff --git a/http/http_client.go b/http/http_client.go index 5bcda30dcd..f8e63fe70a 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -47,7 +47,11 @@ func (c *httpClient) setDefaultHeaders(req *http.Request) { txn, ok := db.TryGetContextTxn(req.Context()) if ok { - req.Header.Set(TX_HEADER_NAME, fmt.Sprintf("%d", txn.ID())) + req.Header.Set(txHeaderName, fmt.Sprintf("%d", txn.ID())) + } + id := db.GetContextIdentity(req.Context()) + if id.HasValue() { + req.Header.Add(authHeaderName, authSchemaPrefix+id.Value().String()) } } diff --git a/http/middleware.go b/http/middleware.go index 674921fd73..4655868373 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -21,12 +21,23 @@ import ( "github.com/go-chi/cors" "golang.org/x/exp/slices" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db" ) -const TX_HEADER_NAME = "x-defradb-tx" +const ( + // txHeaderName is the name of the transaction header. + // This header should contain a valid transaction id. + txHeaderName = "x-defradb-tx" + // authHeaderName is the name of the authorization header. + // This header should contain an ACP identity. + authHeaderName = "Authorization" + // Using Basic right now, but this will soon change to 'Bearer' as acp authentication + // gets implemented: https://github.com/sourcenetwork/defradb/issues/2017 + authSchemaPrefix = "Basic " +) type contextKey string @@ -74,7 +85,7 @@ func TransactionMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { txs := req.Context().Value(txsContextKey).(*sync.Map) - txValue := req.Header.Get(TX_HEADER_NAME) + txValue := req.Header.Get(txHeaderName) if txValue == "" { next.ServeHTTP(rw, req) return @@ -112,3 +123,23 @@ func CollectionMiddleware(next http.Handler) http.Handler { next.ServeHTTP(rw, req.WithContext(ctx)) }) } + +func IdentityMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + authHeader := req.Header.Get(authHeaderName) + if authHeader == "" { + next.ServeHTTP(rw, req) + return + } + + identity := strings.TrimPrefix(authHeader, authSchemaPrefix) + // If expected schema prefix was not found, or empty, then assume no identity. 
+ if identity == authHeader || identity == "" { + next.ServeHTTP(rw, req) + return + } + + ctx := db.SetContextIdentity(req.Context(), acpIdentity.New(identity)) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) +} diff --git a/http/openapi.go b/http/openapi.go index 9e1f58c854..698a88796e 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -40,7 +40,6 @@ var openApiSchemas = map[string]any{ "ccip_response": &CCIPResponse{}, "patch_schema_request": &patchSchemaRequest{}, "add_view_request": &addViewRequest{}, - "add_policy_request": &AddPolicyRequest{}, "migrate_request": &migrateRequest{}, "set_migration_request": &setMigrationRequest{}, } diff --git a/http/utils.go b/http/utils.go index 97d9a9181c..a67afef476 100644 --- a/http/utils.go +++ b/http/utils.go @@ -15,22 +15,11 @@ import ( "fmt" "io" "net/http" - "strings" - "github.com/sourcenetwork/immutable" - - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore/badger/v4" ) -// Using Basic right now, but this will soon change to 'Bearer' as acp authentication -// gets implemented: https://github.com/sourcenetwork/defradb/issues/2017 -const authSchemaPrefix = "Basic " - -// Name of authorization header -const authHeaderName = "Authorization" - func requestJSON(req *http.Request, out any) error { data, err := io.ReadAll(req.Body) if err != nil { @@ -55,38 +44,3 @@ func parseError(msg any) error { return fmt.Errorf("%s", msg) } } - -// addIdentityToAuthHeader adds the identity to auth header as it must always exist. -func addIdentityToAuthHeader(req *http.Request, identity string) { - // Create a bearer that will get added to authorization header. - bearerWithIdentity := authSchemaPrefix + identity - - // Add the authorization header with the bearer containing identity. - req.Header.Add(authHeaderName, bearerWithIdentity) -} - -// addIdentityToAuthHeaderIfExists adds the identity to auth header if it exsits, otherwise does nothing. -func addIdentityToAuthHeaderIfExists(req *http.Request, identity immutable.Option[string]) { - // Do nothing if there is no identity to add. - if !identity.HasValue() { - return - } - addIdentityToAuthHeader(req, identity.Value()) -} - -// getIdentityFromAuthHeader tries to get the identity from the auth header, if it is found -// with the expecte auth schema then it is returned, otherwise no identity is returned. -func getIdentityFromAuthHeader(req *http.Request) immutable.Option[string] { - authHeader := req.Header.Get(authHeaderName) - if authHeader == "" { - return acpIdentity.NoIdentity - } - - identity := strings.TrimPrefix(authHeader, authSchemaPrefix) - // If expected schema prefix was not found, or empty, then assume no identity. 
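The helpers deleted from `http/utils.go` here are redistributed rather than lost: `setDefaultHeaders` in `http/http_client.go` now writes the Authorization header on every outgoing request whose context carries an identity, and `IdentityMiddleware` performs the old `getIdentityFromAuthHeader` parse on the server, storing the result in the request context instead of handing handlers an option to thread through. The effect is that the context convention spans the HTTP boundary transparently; a sketch (client type and method as in this patch, helper name hypothetical):

```go
package sketch

import (
	"context"

	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/db"
	defrahttp "github.com/sourcenetwork/defradb/http"
)

// queryOverHTTP sketches the round trip: setDefaultHeaders serializes
// the context identity into "Authorization: Basic <identity>", and the
// server's IdentityMiddleware deserializes it back into the request
// context, so the handlers in between never see the identity explicitly.
func queryOverHTTP(ctx context.Context, c *defrahttp.Client, actor, query string) *client.RequestResult {
	ctx = db.SetContextIdentity(ctx, acpIdentity.New(actor))
	return c.ExecRequest(ctx, query) // auth header attached automatically
}
```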
- if identity == authHeader || identity == "" { - return acpIdentity.NoIdentity - } - - return acpIdentity.NewIdentity(identity) -} diff --git a/lens/fetcher.go b/lens/fetcher.go index f4895a0fd7..5477b948b5 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -19,6 +19,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" @@ -61,7 +62,7 @@ func NewFetcher(source fetcher.Fetcher, registry client.LensRegistry) fetcher.Fe func (f *lensedFetcher) Init( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], txn datastore.Txn, acp immutable.Option[acp.ACP], col client.Collection, diff --git a/net/client_test.go b/net/client_test.go index 3be892e3f2..89c26e06b5 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -18,7 +18,6 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/events" ) @@ -114,12 +113,12 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Schema()) require.NoError(t, err) - err = col.Save(ctx, acpIdentity.NoIdentity, doc) + err = col.Save(ctx, doc) require.NoError(t, err) col, err = n2.db.GetCollectionByName(ctx, "User") require.NoError(t, err) - err = col.Save(ctx, acpIdentity.NoIdentity, doc) + err = col.Save(ctx, doc) require.NoError(t, err) cid, err := createCID(doc) diff --git a/net/dag_test.go b/net/dag_test.go index 5229e68e93..ddd9e9aab3 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -22,7 +22,6 @@ import ( mh "github.com/multiformats/go-multihash" "github.com/stretchr/testify/require" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/merkle/clock" @@ -173,7 +172,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) txn1, _ := db1.NewTxn(ctx, false) diff --git a/net/peer_collection.go b/net/peer_collection.go index d8d27b361d..8bf7ee337f 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -16,7 +16,6 @@ import ( dsq "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/immutable" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db" @@ -25,16 +24,19 @@ import ( const marker = byte(0xff) func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) error { - txn, err := p.db.NewTxn(p.ctx, false) + txn, err := p.db.NewTxn(ctx, false) if err != nil { return err } - defer txn.Discard(p.ctx) + defer txn.Discard(ctx) + + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) + ctx = db.SetContextTxn(ctx, txn) // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - ctx = db.SetContextTxn(ctx, txn) storeCol, err := 
p.db.GetCollections( ctx, client.CollectionFetchOptions{ @@ -82,8 +84,7 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // from the pubsub topics to avoid receiving duplicate events. removedTopics := []string{} for _, col := range storeCollections { - // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 - keyChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) + keyChan, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -96,7 +97,7 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er } } - if err = txn.Commit(p.ctx); err != nil { + if err = txn.Commit(ctx); err != nil { err = p.rollbackRemovePubSubTopics(removedTopics, err) return p.rollbackAddPubSubTopics(addedTopics, err) } @@ -105,16 +106,19 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er } func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) error { - txn, err := p.db.NewTxn(p.ctx, false) + txn, err := p.db.NewTxn(ctx, false) if err != nil { return err } - defer txn.Discard(p.ctx) + defer txn.Discard(ctx) + + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) + ctx = db.SetContextTxn(ctx, txn) // first let's make sure the collections actually exists storeCollections := []client.Collection{} for _, col := range collectionIDs { - ctx = db.SetContextTxn(ctx, txn) storeCol, err := p.db.GetCollections( ctx, client.CollectionFetchOptions{ @@ -154,8 +158,7 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // to the pubsub topics. addedTopics := []string{} for _, col := range storeCollections { - // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 - keyChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) + keyChan, err := col.GetAllDocIDs(ctx) if err != nil { return err } @@ -168,7 +171,7 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) } } - if err = txn.Commit(p.ctx); err != nil { + if err = txn.Commit(ctx); err != nil { err = p.rollbackAddPubSubTopics(addedTopics, err) return p.rollbackRemovePubSubTopics(removedTopics, err) } diff --git a/net/peer_replicator.go b/net/peer_replicator.go index 1dd3c47cf4..ce5f7e23b6 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -18,7 +18,6 @@ import ( "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db" @@ -41,7 +40,8 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { return err } - // set transaction for all operations + // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 + // ctx = db.SetContextIdentity(ctx, identity) ctx = db.SetContextTxn(ctx, txn) var collections []client.Collection @@ -112,8 +112,7 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { - // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 - keysCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) + keysCh, err := col.GetAllDocIDs(ctx) if err != nil { return NewErrReplicatorDocID(err, col.Name().Value(), rep.Info.ID) 
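The `net/peer_collection.go` and `net/peer_replicator.go` hunks above apply the same convention to transactions: instead of threading `txn` (and the long-lived `p.ctx`) through each call, the transaction is bound once to the request-scoped `ctx` with `db.SetContextTxn`, and `GetCollections`, `GetAllDocIDs`, and `txn.Commit` all read it from there; the commented-out `SetContextIdentity` lines mark where a P2P identity will bind the same way once ACP-over-P2P lands (#2366). The shape of the pattern, as a hypothetical helper:

```go
package sketch

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/db"
)

// withTxn shows the context-binding pattern the peer methods now follow.
func withTxn(ctx context.Context, d client.DB, fn func(ctx context.Context) error) error {
	txn, err := d.NewTxn(ctx, false)
	if err != nil {
		return err
	}
	defer txn.Discard(ctx)

	// Bind the transaction to the context once; collection and store
	// calls made with this ctx all run inside the same transaction.
	ctx = db.SetContextTxn(ctx, txn)
	// TODO-ACP (per the patch): an identity would bind here the same way:
	//   ctx = db.SetContextIdentity(ctx, identity)

	if err := fn(ctx); err != nil {
		return err
	}
	return txn.Commit(ctx)
}
```

In the `net/peer_test.go` hunks that follow, the local variable formerly named `db` becomes `d` for the same reason: the tests now call `db.SetContextIdentity`, so the `db` package name must stay unshadowed.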
} diff --git a/net/peer_test.go b/net/peer_test.go index 3350e026ba..ba06b40447 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -171,7 +171,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) h, err := libp2p.New() @@ -396,7 +396,7 @@ func TestSetReplicator_NoError(t *testing.T) { // TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testing.T) { ctx := context.Background() - db, n := newTestNode(ctx, t) + d, n := newTestNode(ctx, t) defer n.Close() policy := ` @@ -415,7 +415,8 @@ func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testi types: - actor ` - policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) policyID := policyResult.PolicyID require.NoError(t, err) require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) @@ -427,7 +428,7 @@ func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testi } `, policyID, ) - _, err = db.AddSchema(ctx, schema) + _, err = d.AddSchema(ctx, schema) require.NoError(t, err) info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") @@ -446,7 +447,7 @@ func TestSetReplicatorWithACollectionSpecifiedThatHasPolicy_ReturnError(t *testi // TODO-ACP: ACP <> P2P https://github.com/sourcenetwork/defradb/issues/2366 func TestSetReplicatorWithSomeCollectionThatHasPolicyUsingAllCollectionsByDefault_ReturnError(t *testing.T) { ctx := context.Background() - db, n := newTestNode(ctx, t) + d, n := newTestNode(ctx, t) defer n.Close() policy := ` @@ -465,7 +466,8 @@ func TestSetReplicatorWithSomeCollectionThatHasPolicyUsingAllCollectionsByDefaul types: - actor ` - policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) policyID := policyResult.PolicyID require.NoError(t, err) require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) @@ -477,7 +479,7 @@ func TestSetReplicatorWithSomeCollectionThatHasPolicyUsingAllCollectionsByDefaul } `, policyID, ) - _, err = db.AddSchema(ctx, schema) + _, err = d.AddSchema(ctx, schema) require.NoError(t, err) info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") @@ -576,10 +578,10 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) - keysCh, err := col.GetAllDocIDs(ctx, acpIdentity.NoIdentity) + keysCh, err := col.GetAllDocIDs(ctx) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -804,7 +806,7 @@ func TestAddP2PCollections_WithInvalidCollectionID_NotFoundError(t *testing.T) { // TODO-ACP: ACP <> P2P 
https://github.com/sourcenetwork/defradb/issues/2366 func TestAddP2PCollectionsWithPermissionedCollection_Error(t *testing.T) { ctx := context.Background() - db, n := newTestNode(ctx, t) + d, n := newTestNode(ctx, t) defer n.Close() policy := ` @@ -823,7 +825,8 @@ func TestAddP2PCollectionsWithPermissionedCollection_Error(t *testing.T) { types: - actor ` - policyResult, err := db.AddPolicy(ctx, "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969", policy) + ctx = db.SetContextIdentity(ctx, acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969")) + policyResult, err := d.AddPolicy(ctx, policy) policyID := policyResult.PolicyID require.NoError(t, err) require.Equal(t, "fc3a0a39c73949c70a79e02b8d928028e9cbcc772ba801463a6acdcf2f256cd4", policyID) @@ -835,10 +838,10 @@ func TestAddP2PCollectionsWithPermissionedCollection_Error(t *testing.T) { } `, policyID, ) - _, err = db.AddSchema(ctx, schema) + _, err = d.AddSchema(ctx, schema) require.NoError(t, err) - col, err := db.GetCollectionByName(ctx, "User") + col, err := d.GetCollectionByName(ctx, "User") require.NoError(t, err) err = n.Peer.AddP2PCollections(ctx, []string{col.SchemaRoot()}) @@ -940,7 +943,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -993,7 +996,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) @@ -1023,7 +1026,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -1076,7 +1079,7 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) docCid, err := createCID(doc) @@ -1120,7 +1123,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) docCid, err := createCID(doc) diff --git a/net/server.go b/net/server.go index 73496559cf..ebf772a8bc 100644 --- a/net/server.go +++ b/net/server.go @@ -29,7 +29,6 @@ import ( grpcpeer "google.golang.org/grpc/peer" "google.golang.org/protobuf/proto" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore/badger/v4" @@ -110,7 +109,7 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) continue } // TODO-ACP: Support ACP <> P2P - https://github.com/sourcenetwork/defradb/issues/2366 - docIDChan, err := col.GetAllDocIDs(p.ctx, acpIdentity.NoIdentity) + docIDChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return nil, err } @@ -358,7 +357,7 @@ func (s *server) syncIndexedDocs( //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 // Resolve while handling acp <> secondary indexes. - oldDoc, err := col.Get(oldCtx, acpIdentity.NoIdentity, docID, false) + oldDoc, err := col.Get(oldCtx, docID, false) isNewDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isNewDoc && err != nil { return err @@ -366,7 +365,7 @@ func (s *server) syncIndexedDocs( //TODO-ACP: https://github.com/sourcenetwork/defradb/issues/2365 // Resolve while handling acp <> secondary indexes. - doc, err := col.Get(ctx, acpIdentity.NoIdentity, docID, false) + doc, err := col.Get(ctx, docID, false) isDeletedDoc := errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) if !isDeletedDoc && err != nil { return err diff --git a/net/server_test.go b/net/server_test.go index 5e6eda3d1d..a1d2eeeb57 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -18,11 +18,9 @@ import ( "github.com/libp2p/go-libp2p/core/event" "github.com/libp2p/go-libp2p/core/host" rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" - "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/require" grpcpeer "google.golang.org/grpc/peer" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/errors" @@ -102,8 +100,6 @@ func (mCol *mockCollection) SchemaRoot() string { } func (mCol *mockCollection) GetAllDocIDs( ctx context.Context, - identity immutable.Option[string], - ) (<-chan client.DocIDResult, error) { return nil, mockError } @@ -140,7 +136,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) @@ -186,7 +182,7 @@ func TestNewServerWithEmitterError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) - err = col.Create(ctx, acpIdentity.NoIdentity, doc) + err = col.Create(ctx, doc) require.NoError(t, err) n.Peer.host = &mockHost{n.Peer.host} diff --git a/planner/create.go b/planner/create.go index bedb1be5d5..2918e9ee46 100644 --- a/planner/create.go +++ b/planner/create.go @@ -80,7 +80,6 @@ func (n *createNode) Next() (bool, error) { if err 
:= n.collection.Create( n.p.ctx, - n.p.identity, n.doc, ); err != nil { return false, err diff --git a/planner/delete.go b/planner/delete.go index 87cf0994ac..41bbd15716 100644 --- a/planner/delete.go +++ b/planner/delete.go @@ -55,7 +55,6 @@ func (n *deleteNode) Next() (bool, error) { } _, err = n.collection.DeleteWithDocID( n.p.ctx, - n.p.identity, docID, ) if err != nil { diff --git a/planner/planner.go b/planner/planner.go index eca0168671..e0f1e07613 100644 --- a/planner/planner.go +++ b/planner/planner.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/acp" + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/connor" @@ -86,7 +87,7 @@ type PlanContext struct { // produce a request plan, which is run by the execution context. type Planner struct { txn datastore.Txn - identity immutable.Option[string] + identity immutable.Option[acpIdentity.Identity] acp immutable.Option[acp.ACP] db client.Store @@ -95,7 +96,7 @@ type Planner struct { func New( ctx context.Context, - identity immutable.Option[string], + identity immutable.Option[acpIdentity.Identity], acp immutable.Option[acp.ACP], db client.Store, txn datastore.Txn, diff --git a/planner/update.go b/planner/update.go index 458094d4e0..102aceb515 100644 --- a/planner/update.go +++ b/planner/update.go @@ -72,7 +72,7 @@ func (n *updateNode) Next() (bool, error) { if err != nil { return false, err } - _, err = n.collection.UpdateWithDocID(n.p.ctx, n.p.identity, docID, string(patch)) + _, err = n.collection.UpdateWithDocID(n.p.ctx, docID, string(patch)) if err != nil { return false, err } diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index 90b9cd0768..dac81d0ce2 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -22,7 +22,6 @@ import ( "github.com/sourcenetwork/badger/v4" "github.com/sourcenetwork/corelog" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/tests/bench/fixtures" @@ -171,7 +170,7 @@ func BackfillBenchmarkDB( // in place. The error check could prob use a wrap system // but its fine :). 
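Alongside dropping the parameter, `planner` and `lens/fetcher.go` (above) also retype the identity from `immutable.Option[string]` to `immutable.Option[acpIdentity.Identity]`, with `acpIdentity.None` replacing the old `NoIdentity` sentinel, as the bench utilities here show. A sketch of constructing a planner both ways under the new type (the function name is hypothetical; the `planner.New` signature and actor address are as used in the bench and test code):

```go
package sketch

import (
	"context"

	"github.com/sourcenetwork/defradb/acp"
	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/datastore"
	"github.com/sourcenetwork/defradb/planner"
)

// makePlanners shows both ends of the new identity type.
func makePlanners(ctx context.Context, d client.Store, txn datastore.Txn) {
	// Anonymous access: the typed None sentinel replaces NoIdentity.
	anon := planner.New(ctx, acpIdentity.None, acp.NoACP, d, txn)

	// Acting as a specific actor: wrap the address with acpIdentity.New.
	actor := planner.New(ctx,
		acpIdentity.New("cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969"),
		acp.NoACP, d, txn)

	_, _ = anon, actor
}
```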
for { - if err := cols[j].Create(ctx, acpIdentity.NoIdentity, doc); err != nil && + if err := cols[j].Create(ctx, doc); err != nil && err.Error() == badger.ErrConflict.Error() { log.InfoContext( ctx, diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index b8faa0dd50..59ab0577ba 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -17,7 +17,6 @@ import ( "sync" "testing" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" benchutils "github.com/sourcenetwork/defradb/tests/bench" @@ -75,7 +74,6 @@ func runCollectionBenchGetSync(b *testing.B, for k := 0; k < numTypes; k++ { // apply op to all the related types collections[k].Get( //nolint:errcheck ctx, - acpIdentity.NoIdentity, listOfDocIDs[j][k], false, ) @@ -106,7 +104,6 @@ func runCollectionBenchGetAsync(b *testing.B, go func(ctx context.Context, col client.Collection, docID client.DocID) { col.Get( //nolint:errcheck ctx, - acpIdentity.NoIdentity, docID, false, ) @@ -184,7 +181,7 @@ func runCollectionBenchCreateMany( docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Schema()) } - collections[0].CreateMany(ctx, acpIdentity.NoIdentity, docs) //nolint:errcheck + collections[0].CreateMany(ctx, docs) //nolint:errcheck } b.StopTimer() @@ -205,7 +202,7 @@ func runCollectionBenchCreateSync(b *testing.B, docs, _ := fixture.GenerateDocs() for k := 0; k < numTypes; k++ { doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Schema()) - collections[k].Create(ctx, acpIdentity.NoIdentity, doc) //nolint:errcheck + collections[k].Create(ctx, doc) //nolint:errcheck } } } @@ -244,7 +241,7 @@ func runCollectionBenchCreateAsync(b *testing.B, // create the documents for j := 0; j < numTypes; j++ { doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Schema()) - collections[j].Create(ctx, acpIdentity.NoIdentity, doc) //nolint:errcheck + collections[j].Create(ctx, doc) //nolint:errcheck } wg.Done() diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index caba91836d..dec517d781 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -82,7 +82,7 @@ func runMakePlanBench( for i := 0; i < b.N; i++ { planner := planner.New( ctx, - acpIdentity.NoIdentity, + acpIdentity.None, acp.NoACP, d, txn, diff --git a/tests/bench/query/simple/utils.go b/tests/bench/query/simple/utils.go index e4604f96a0..14752e7ae2 100644 --- a/tests/bench/query/simple/utils.go +++ b/tests/bench/query/simple/utils.go @@ -17,7 +17,6 @@ import ( "strings" "testing" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" benchutils "github.com/sourcenetwork/defradb/tests/bench" @@ -71,7 +70,7 @@ func runQueryBenchGetSync( b.ResetTimer() for i := 0; i < b.N; i++ { - res := db.ExecRequest(ctx, acpIdentity.NoIdentity, query) + res := db.ExecRequest(ctx, query) if len(res.GQL.Errors) > 0 { return errors.New(fmt.Sprintf("Query error: %v", res.GQL.Errors)) } diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 2ddaf86137..b0dddff9cd 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -174,11 +174,9 @@ func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) func (w *Wrapper) AddPolicy( ctx context.Context, - creator string, policy string, ) (client.AddPolicyResult, error) { args := 
[]string{"client", "acp", "policy", "add"} - args = append(args, "--identity", creator) args = append(args, policy) data, err := w.cmd.execute(ctx, args) @@ -394,16 +392,11 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] func (w *Wrapper) ExecRequest( ctx context.Context, - identity immutable.Option[string], query string, ) *client.RequestResult { args := []string{"client", "query"} args = append(args, query) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - result := &client.RequestResult{} stdOut, stdErr, err := w.cmd.executeStream(ctx, args) diff --git a/tests/clients/cli/wrapper_cli.go b/tests/clients/cli/wrapper_cli.go index 9076605857..cee64081d4 100644 --- a/tests/clients/cli/wrapper_cli.go +++ b/tests/clients/cli/wrapper_cli.go @@ -57,6 +57,10 @@ func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadC if ok { args = append(args, "--tx", fmt.Sprintf("%d", tx.ID())) } + id := db.GetContextIdentity(ctx) + if id.HasValue() { + args = append(args, "--identity", id.Value().String()) + } args = append(args, "--url", w.address) cmd := cli.NewDefraCommand() diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 861606a2d1..4ff8692561 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -57,7 +57,6 @@ func (c *Collection) Definition() client.CollectionDefinition { func (c *Collection) Create( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { if !c.Description().Name.HasValue() { @@ -67,10 +66,6 @@ func (c *Collection) Create( args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - document, err := doc.String() if err != nil { return err @@ -87,7 +82,7 @@ func (c *Collection) Create( func (c *Collection) CreateMany( ctx context.Context, - identity immutable.Option[string], + docs []*client.Document, ) error { if !c.Description().Name.HasValue() { @@ -97,10 +92,6 @@ func (c *Collection) CreateMany( args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { docMap, err := doc.ToMap() @@ -127,7 +118,7 @@ func (c *Collection) CreateMany( func (c *Collection) Update( ctx context.Context, - identity immutable.Option[string], + doc *client.Document, ) error { if !c.Description().Name.HasValue() { @@ -137,10 +128,6 @@ func (c *Collection) Update( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, "--docID", doc.ID().String()) document, err := doc.ToJSONPatch() @@ -159,25 +146,24 @@ func (c *Collection) Update( func (c *Collection) Save( ctx context.Context, - identity immutable.Option[string], doc *client.Document, ) error { - _, err := c.Get(ctx, identity, doc.ID(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { - return c.Update(ctx, identity, doc) + return c.Update(ctx, doc) } if errors.Is(err, client.ErrDocumentNotFoundOrNotAuthorized) { - return c.Create(ctx, identity, doc) + return c.Create(ctx, doc) } return err } func (c *Collection) 
Delete( ctx context.Context, - identity immutable.Option[string], + docID client.DocID, ) (bool, error) { - res, err := c.DeleteWithDocID(ctx, identity, docID) + res, err := c.DeleteWithDocID(ctx, docID) if err != nil { return false, err } @@ -186,10 +172,10 @@ func (c *Collection) Delete( func (c *Collection) Exists( ctx context.Context, - identity immutable.Option[string], + docID client.DocID, ) (bool, error) { - _, err := c.Get(ctx, identity, docID, false) + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -198,17 +184,17 @@ func (c *Collection) Exists( func (c *Collection) UpdateWith( ctx context.Context, - identity immutable.Option[string], + target any, updater string, ) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, identity, t, updater) + return c.UpdateWithFilter(ctx, t, updater) case client.DocID: - return c.UpdateWithDocID(ctx, identity, t, updater) + return c.UpdateWithDocID(ctx, t, updater) case []client.DocID: - return c.UpdateWithDocIDs(ctx, identity, t, updater) + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -231,7 +217,7 @@ func (c *Collection) updateWith( func (c *Collection) UpdateWithFilter( ctx context.Context, - identity immutable.Option[string], + filter any, updater string, ) (*client.UpdateResult, error) { @@ -242,10 +228,6 @@ func (c *Collection) UpdateWithFilter( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, "--updater", updater) filterJSON, err := json.Marshal(filter) @@ -259,7 +241,7 @@ func (c *Collection) UpdateWithFilter( func (c *Collection) UpdateWithDocID( ctx context.Context, - identity immutable.Option[string], + docID client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -270,10 +252,6 @@ func (c *Collection) UpdateWithDocID( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, "--docID", docID.String()) args = append(args, "--updater", updater) @@ -282,7 +260,7 @@ func (c *Collection) UpdateWithDocID( func (c *Collection) UpdateWithDocIDs( ctx context.Context, - identity immutable.Option[string], + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { @@ -293,10 +271,6 @@ func (c *Collection) UpdateWithDocIDs( args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, "--updater", updater) strDocIDs := make([]string, len(docIDs)) @@ -310,16 +284,16 @@ func (c *Collection) UpdateWithDocIDs( func (c *Collection) DeleteWith( ctx context.Context, - identity immutable.Option[string], + target any, ) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, identity, t) + return c.DeleteWithFilter(ctx, t) case client.DocID: - return c.DeleteWithDocID(ctx, identity, t) + return c.DeleteWithDocID(ctx, t) case []client.DocID: - return c.DeleteWithDocIDs(ctx, identity, t) + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -342,7 +316,7 @@ func (c 
*Collection) deleteWith( func (c *Collection) DeleteWithFilter( ctx context.Context, - identity immutable.Option[string], + filter any, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { @@ -352,10 +326,6 @@ func (c *Collection) DeleteWithFilter( args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - filterJSON, err := json.Marshal(filter) if err != nil { return nil, err @@ -367,7 +337,7 @@ func (c *Collection) DeleteWithFilter( func (c *Collection) DeleteWithDocID( ctx context.Context, - identity immutable.Option[string], + docID client.DocID, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { @@ -377,10 +347,6 @@ func (c *Collection) DeleteWithDocID( args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, "--docID", docID.String()) return c.deleteWith(ctx, args) @@ -388,7 +354,7 @@ func (c *Collection) DeleteWithDocID( func (c *Collection) DeleteWithDocIDs( ctx context.Context, - identity immutable.Option[string], + docIDs []client.DocID, ) (*client.DeleteResult, error) { if !c.Description().Name.HasValue() { @@ -398,10 +364,6 @@ func (c *Collection) DeleteWithDocIDs( args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - strDocIDs := make([]string, len(docIDs)) for i, v := range docIDs { strDocIDs[i] = v.String() @@ -413,7 +375,6 @@ func (c *Collection) DeleteWithDocIDs( func (c *Collection) Get( ctx context.Context, - identity immutable.Option[string], docID client.DocID, showDeleted bool, ) (*client.Document, error) { @@ -424,10 +385,6 @@ func (c *Collection) Get( args := []string{"client", "collection", "get"} args = append(args, "--name", c.Description().Name.Value()) - if identity.HasValue() { - args = append(args, "--identity", identity.Value()) - } - args = append(args, docID.String()) if showDeleted { @@ -449,7 +406,7 @@ func (c *Collection) Get( func (c *Collection) GetAllDocIDs( ctx context.Context, - identity immutable.Option[string], + ) (<-chan client.DocIDResult, error) { if !c.Description().Name.HasValue() { return nil, client.ErrOperationNotPermittedOnNamelessCols diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 51fe7ae66b..51911c3321 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -100,10 +100,9 @@ func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.Collec func (w *Wrapper) AddPolicy( ctx context.Context, - creator string, policy string, ) (client.AddPolicyResult, error) { - return w.client.AddPolicy(ctx, creator, policy) + return w.client.AddPolicy(ctx, policy) } func (w *Wrapper) PatchSchema( @@ -171,10 +170,9 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] func (w *Wrapper) ExecRequest( ctx context.Context, - identity immutable.Option[string], query string, ) *client.RequestResult { - return w.client.ExecRequest(ctx, identity, query) + return w.client.ExecRequest(ctx, query) } func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { diff --git a/tests/gen/cli/gendocs.go b/tests/gen/cli/gendocs.go index 068af9b25a..58152c721a 100644 --- 
a/tests/gen/cli/gendocs.go +++ b/tests/gen/cli/gendocs.go @@ -19,7 +19,6 @@ import ( "github.com/spf13/cobra" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/tests/gen" @@ -122,7 +121,7 @@ func saveBatchToCollections( for colName, colDocs := range colDocsMap { for _, col := range collections { if col.Description().Name.Value() == colName { - err := col.CreateMany(ctx, acpIdentity.NoIdentity, colDocs) + err := col.CreateMany(ctx, colDocs) if err != nil { return err } diff --git a/tests/integration/acp.go b/tests/integration/acp.go index 8c4969e228..eb9bdc8fbe 100644 --- a/tests/integration/acp.go +++ b/tests/integration/acp.go @@ -13,6 +13,9 @@ package tests import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/require" + + acpIdentity "github.com/sourcenetwork/defradb/acp/identity" + "github.com/sourcenetwork/defradb/db" ) // AddPolicy will attempt to add the given policy using DefraDB's ACP system. @@ -25,8 +28,8 @@ type AddPolicy struct { // The raw policy string. Policy string - // The policy creator, i.e. actor creating the policy. - Creator string + // The policy creator identity, i.e. actor creating the policy. + Identity string // The expected policyID generated based on the Policy loaded in to the ACP system. ExpectedPolicyID string @@ -49,11 +52,8 @@ func addPolicyACP( } for _, node := range getNodes(action.NodeID, s.nodes) { - policyResult, err := node.AddPolicy( - s.ctx, - action.Creator, - action.Policy, - ) + ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity)) + policyResult, err := node.AddPolicy(ctx, action.Policy) if err == nil { require.Equal(s.t, action.ExpectedError, "") diff --git a/tests/integration/acp/add_policy/basic_test.go b/tests/integration/acp/add_policy/basic_test.go index 47aa351e77..fdf53f02cc 100644 --- a/tests/integration/acp/add_policy/basic_test.go +++ b/tests/integration/acp/add_policy/basic_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_BasicYAML_ValidPolicyID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a basic policy that satisfies minimum DPI requirements @@ -61,7 +61,7 @@ func TestACP_AddPolicy_BasicJSON_ValidPolicyID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` { diff --git a/tests/integration/acp/add_policy/fixture.go b/tests/integration/acp/add_policy/fixture.go index 8fc2edb7cd..97ae5e6ff6 100644 --- a/tests/integration/acp/add_policy/fixture.go +++ b/tests/integration/acp/add_policy/fixture.go @@ -14,5 +14,5 @@ import ( acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" ) -var actor1Signature = acpUtils.Actor1Signature -var actor2Signature = acpUtils.Actor2Signature +var actor1Identity = acpUtils.Actor1Identity +var actor2Identity = acpUtils.Actor2Identity diff --git a/tests/integration/acp/add_policy/with_empty_args_test.go b/tests/integration/acp/add_policy/with_empty_args_test.go index de93019a20..dc530d95b0 100644 --- a/tests/integration/acp/add_policy/with_empty_args_test.go +++ b/tests/integration/acp/add_policy/with_empty_args_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_EmptyPolicyData_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: "", @@ -42,7 +42,7 @@ func TestACP_AddPolicy_EmptyPolicyCreator_Error(t *testing.T) { 
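// The policy creator is now supplied via the Identity field (the
// testUtils.AddPolicy struct earlier in this diff renames Creator to
// Identity); an empty Identity here exercises the missing-creator error path.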
Actions: []any{ testUtils.AddPolicy{ - Creator: "", + Identity: "", Policy: ` description: a basic policy that satisfies minimum DPI requirements @@ -80,7 +80,7 @@ func TestACP_AddPolicy_EmptyCreatorAndPolicyArgs_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: "", + Identity: "", Policy: "", diff --git a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go index 6606b62af5..cd84e3d910 100644 --- a/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go +++ b/tests/integration/acp/add_policy/with_extra_perms_and_relations_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_ExtraPermissionsAndExtraRelations_ValidPolicyID(t *testin Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_extra_perms_test.go b/tests/integration/acp/add_policy/with_extra_perms_test.go index 0fbcc842c0..8c13555c8d 100644 --- a/tests/integration/acp/add_policy/with_extra_perms_test.go +++ b/tests/integration/acp/add_policy/with_extra_perms_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_ExtraPermissions_ValidPolicyID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -62,7 +62,7 @@ func TestACP_AddPolicy_ExtraDuplicatePermissions_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_extra_relations_test.go b/tests/integration/acp/add_policy/with_extra_relations_test.go index b8f568b5e6..d3e4308c24 100644 --- a/tests/integration/acp/add_policy/with_extra_relations_test.go +++ b/tests/integration/acp/add_policy/with_extra_relations_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_ExtraRelations_ValidPolicyID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -66,7 +66,7 @@ func TestACP_AddPolicy_ExtraDuplicateRelations_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go index 1c7a29c148..2e56670add 100644 --- a/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go +++ b/tests/integration/acp/add_policy/with_invalid_creator_arg_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_InvalidCreatorIdentityWithValidPolicy_Error(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: "invalid", + Identity: "invalid", Policy: ` description: a basic policy that satisfies minimum DPI requirements @@ -62,7 +62,7 @@ func TestACP_AddPolicy_InvalidCreatorIdentityWithEmptyPolicy_Error(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: "invalid", + Identity: "invalid", Policy: "", diff --git a/tests/integration/acp/add_policy/with_invalid_relations_test.go b/tests/integration/acp/add_policy/with_invalid_relations_test.go index 9184d69426..37945509a5 100644 --- a/tests/integration/acp/add_policy/with_invalid_relations_test.go +++ b/tests/integration/acp/add_policy/with_invalid_relations_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_NoRelations_Error(t *testing.T) { Actions: []any{ 
testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -57,7 +57,7 @@ func TestACP_AddPolicy_NoRelationsLabel_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go index 3f3f3c4db3..d8982703cc 100644 --- a/tests/integration/acp/add_policy/with_invalid_required_relation_test.go +++ b/tests/integration/acp/add_policy/with_invalid_required_relation_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_MissingRequiredOwnerRelation_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -60,7 +60,7 @@ func TestACP_AddPolicy_DuplicateOwnerRelation_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_invalid_resource_test.go b/tests/integration/acp/add_policy/with_invalid_resource_test.go index 499ff146d1..2fc311102d 100644 --- a/tests/integration/acp/add_policy/with_invalid_resource_test.go +++ b/tests/integration/acp/add_policy/with_invalid_resource_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_OneResourceThatIsEmpty_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_managed_relation_test.go b/tests/integration/acp/add_policy/with_managed_relation_test.go index 74f89e365a..d80c5b1c05 100644 --- a/tests/integration/acp/add_policy/with_managed_relation_test.go +++ b/tests/integration/acp/add_policy/with_managed_relation_test.go @@ -22,7 +22,7 @@ func TestACP_AddPolicy_WithRelationManagingOtherRelation_ValidPolicyID(t *testin Description: "Test acp, where a relation is managing another relation, valid policy id", Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy with admin relation managing reader relation diff --git a/tests/integration/acp/add_policy/with_multi_policies_test.go b/tests/integration/acp/add_policy/with_multi_policies_test.go index 52b7333d34..6fbbfb2c39 100644 --- a/tests/integration/acp/add_policy/with_multi_policies_test.go +++ b/tests/integration/acp/add_policy/with_multi_policies_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -50,7 +50,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPolicies_ValidPolicyIDs(t *testing.T) }, testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: another policy @@ -95,7 +95,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyID Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` { @@ -129,7 +129,7 @@ func TestACP_AddPolicy_AddMultipleDifferentPoliciesInDifferentFmts_ValidPolicyID }, testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: another policy @@ -194,7 +194,7 @@ func 
TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testin Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: policyUsedByBoth, @@ -202,7 +202,7 @@ func TestACP_AddPolicy_AddDuplicatePolicyByOtherCreator_ValidPolicyIDs(t *testin }, testUtils.AddPolicy{ - Creator: actor2Signature, + Identity: actor2Identity, Policy: policyUsedByBoth, @@ -221,7 +221,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -248,7 +248,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePolicies_Error(t *testing.T) { }, testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -286,7 +286,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_Error(t *testin Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -312,7 +312,7 @@ func TestACP_AddPolicy_AddMultipleDuplicatePoliciesDifferentFmts_Error(t *testin }, testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` { diff --git a/tests/integration/acp/add_policy/with_multiple_resources_test.go b/tests/integration/acp/add_policy/with_multiple_resources_test.go index c939600663..6d6c890452 100644 --- a/tests/integration/acp/add_policy/with_multiple_resources_test.go +++ b/tests/integration/acp/add_policy/with_multiple_resources_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_MultipleResources_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -77,7 +77,7 @@ func TestACP_AddPolicy_MultipleResourcesUsingRelationDefinedInOther_Error(t *tes Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -128,7 +128,7 @@ func TestACP_AddPolicy_SecondResourcesMissingRequiredOwner_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_no_perms_test.go b/tests/integration/acp/add_policy/with_no_perms_test.go index 0f55851468..156788ca45 100644 --- a/tests/integration/acp/add_policy/with_no_perms_test.go +++ b/tests/integration/acp/add_policy/with_no_perms_test.go @@ -31,7 +31,7 @@ func TestACP_AddPolicy_NoPermissionsOnlyOwner_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -65,7 +65,7 @@ func TestACP_AddPolicy_NoPermissionsMultiRelations_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -102,7 +102,7 @@ func TestACP_AddPolicy_NoPermissionsLabelOnlyOwner_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -134,7 +134,7 @@ func TestACP_AddPolicy_NoPermissionsLabelMultiRelations_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_no_resources_test.go b/tests/integration/acp/add_policy/with_no_resources_test.go index 861a77c64e..6b4097584a 100644 
--- a/tests/integration/acp/add_policy/with_no_resources_test.go +++ b/tests/integration/acp/add_policy/with_no_resources_test.go @@ -25,7 +25,7 @@ func TestACP_AddPolicy_NoResource_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -53,7 +53,7 @@ func TestACP_AddPolicy_NoResourceLabel_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -79,7 +79,7 @@ func TestACP_AddPolicy_PolicyWithOnlySpace_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: " ", diff --git a/tests/integration/acp/add_policy/with_perm_expr_test.go b/tests/integration/acp/add_policy/with_perm_expr_test.go index 7af2c8e690..177de98ebe 100644 --- a/tests/integration/acp/add_policy/with_perm_expr_test.go +++ b/tests/integration/acp/add_policy/with_perm_expr_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinus_ValidID(t *testi Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -64,7 +64,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithMinusNoSpace_ValidID(t Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go index c61a6e0b9c..7c5033d700 100644 --- a/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go +++ b/tests/integration/acp/add_policy/with_perm_invalid_expr_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_EmptyExpressionInPermission_Error(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -63,7 +63,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithInocorrectSymbol_Error Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -103,7 +103,7 @@ func TestACP_AddPolicy_PermissionExprWithOwnerInTheEndWithInocorrectSymbolNoSpac Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_permissionless_owner_test.go b/tests/integration/acp/add_policy/with_permissionless_owner_test.go index 6496a1dec3..0fda8a7468 100644 --- a/tests/integration/acp/add_policy/with_permissionless_owner_test.go +++ b/tests/integration/acp/add_policy/with_permissionless_owner_test.go @@ -30,7 +30,7 @@ func TestACP_AddPolicy_PermissionlessOwnerWrite_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -70,7 +70,7 @@ func TestACP_AddPolicy_PermissionlessOwnerRead_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -110,7 +110,7 @@ func TestACP_AddPolicy_PermissionlessOwnerReadWrite_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/add_policy/with_unused_relations_test.go 
b/tests/integration/acp/add_policy/with_unused_relations_test.go index faa7658e5e..fbcec10755 100644 --- a/tests/integration/acp/add_policy/with_unused_relations_test.go +++ b/tests/integration/acp/add_policy/with_unused_relations_test.go @@ -23,7 +23,7 @@ func TestACP_AddPolicy_UnusedRelation_ValidID(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/fixture.go b/tests/integration/acp/fixture.go index ea0ccfc09d..ae05f780a4 100644 --- a/tests/integration/acp/fixture.go +++ b/tests/integration/acp/fixture.go @@ -10,5 +10,7 @@ package test_acp -var Actor1Signature = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" -var Actor2Signature = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" +var ( + Actor1Identity = "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969" + Actor2Identity = "cosmos1x25hhksxhu86r45hqwk28dd70qzux3262hdrll" +) diff --git a/tests/integration/acp/index/create_test.go b/tests/integration/acp/index/create_test.go index 4a1d4c6fac..f2c9b193a7 100644 --- a/tests/integration/acp/index/create_test.go +++ b/tests/integration/acp/index/create_test.go @@ -27,7 +27,7 @@ func TestACP_IndexCreateWithSeparateRequest_OnCollectionWithPolicy_ReturnError(t testUtils.AddPolicy{ - Creator: acpUtils.Actor1Signature, + Identity: acpUtils.Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -109,7 +109,7 @@ func TestACP_IndexCreateWithDirective_OnCollectionWithPolicy_ReturnError(t *test testUtils.AddPolicy{ - Creator: acpUtils.Actor1Signature, + Identity: acpUtils.Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/acp/p2p/replicator_test.go b/tests/integration/acp/p2p/replicator_test.go index 8a581347e1..9c3b0eca3f 100644 --- a/tests/integration/acp/p2p/replicator_test.go +++ b/tests/integration/acp/p2p/replicator_test.go @@ -32,7 +32,7 @@ func TestACP_P2POneToOneReplicatorWithPermissionedCollection_Error(t *testing.T) testUtils.AddPolicy{ - Creator: acpUtils.Actor1Signature, + Identity: acpUtils.Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/acp/p2p/subscribe_test.go b/tests/integration/acp/p2p/subscribe_test.go index baf4c7cd0d..610339d24f 100644 --- a/tests/integration/acp/p2p/subscribe_test.go +++ b/tests/integration/acp/p2p/subscribe_test.go @@ -32,7 +32,7 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollection_Error(t *testing testUtils.AddPolicy{ - Creator: acpUtils.Actor1Signature, + Identity: acpUtils.Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/acp/register_and_delete_test.go b/tests/integration/acp/register_and_delete_test.go index 36a2892677..5d0baf0762 100644 --- a/tests/integration/acp/register_and_delete_test.go +++ b/tests/integration/acp/register_and_delete_test.go @@ -28,7 +28,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithoutIdentity_CanDelete(t *testing. 
Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -121,7 +121,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -180,7 +180,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) testUtils.DeleteDoc{ CollectionID: 0, - Identity: Actor1Signature, + Identity: Actor1Identity, DocID: 0, }, @@ -206,7 +206,7 @@ func TestACP_CreateWithoutIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity test := testUtils.TestCase{ @@ -215,7 +215,7 @@ func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -304,7 +304,7 @@ func TestACP_CreateWithIdentityAndDeleteWithIdentity_CanDelete(t *testing.T) { func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity test := testUtils.TestCase{ @@ -313,7 +313,7 @@ func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing. Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -408,9 +408,9 @@ func TestACP_CreateWithIdentityAndDeleteWithoutIdentity_CanNotDelete(t *testing. func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. 
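// Actor1Identity and Actor2Identity are the bech32 actor addresses defined in
// tests/integration/acp/fixture.go (e.g. Actor1Identity =
// "cosmos1zzg43wdrhmmk89z3pmejwete2kkd4a3vn7w969"); the rename from *Signature
// makes it explicit that these strings identify actors rather than sign anything.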
- OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity - WrongIdentity := Actor2Signature + WrongIdentity := Actor2Identity test := testUtils.TestCase{ @@ -419,7 +419,7 @@ func TestACP_CreateWithIdentityAndDeleteWithWrongIdentity_CanNotDelete(t *testin Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/acp/register_and_read_test.go b/tests/integration/acp/register_and_read_test.go index d01a8835ea..a2620b82d7 100644 --- a/tests/integration/acp/register_and_read_test.go +++ b/tests/integration/acp/register_and_read_test.go @@ -24,7 +24,7 @@ func TestACP_CreateWithoutIdentityAndReadWithoutIdentity_CanRead(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -112,7 +112,7 @@ func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -169,7 +169,7 @@ func TestACP_CreateWithoutIdentityAndReadWithIdentity_CanRead(t *testing.T) { }, testUtils.Request{ - Identity: Actor1Signature, + Identity: Actor1Identity, Request: ` query { @@ -202,7 +202,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -250,7 +250,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) { testUtils.CreateDoc{ CollectionID: 0, - Identity: Actor1Signature, + Identity: Actor1Identity, Doc: ` { @@ -261,7 +261,7 @@ func TestACP_CreateWithIdentityAndReadWithIdentity_CanRead(t *testing.T) { }, testUtils.Request{ - Identity: Actor1Signature, + Identity: Actor1Identity, Request: ` query { @@ -294,7 +294,7 @@ func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -342,7 +342,7 @@ func TestACP_CreateWithIdentityAndReadWithoutIdentity_CanNotRead(t *testing.T) { testUtils.CreateDoc{ CollectionID: 0, - Identity: Actor1Signature, + Identity: Actor1Identity, Doc: ` { @@ -378,7 +378,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -426,7 +426,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T) testUtils.CreateDoc{ CollectionID: 0, - Identity: Actor1Signature, + Identity: Actor1Identity, Doc: ` { @@ -437,7 +437,7 @@ func TestACP_CreateWithIdentityAndReadWithWrongIdentity_CanNotRead(t *testing.T) }, testUtils.Request{ - Identity: Actor2Signature, + Identity: Actor2Identity, Request: ` query { diff --git a/tests/integration/acp/register_and_update_test.go b/tests/integration/acp/register_and_update_test.go index 1afa89cf86..96810409db 100644 --- a/tests/integration/acp/register_and_update_test.go +++ 
b/tests/integration/acp/register_and_update_test.go @@ -30,7 +30,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithoutIdentity_CanUpdate(t *testing. Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -135,7 +135,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) Actions: []any{ testUtils.AddPolicy{ - Creator: Actor1Signature, + Identity: Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -194,7 +194,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) testUtils.UpdateDoc{ CollectionID: 0, - Identity: Actor1Signature, + Identity: Actor1Identity, DocID: 0, @@ -232,7 +232,7 @@ func TestACP_CreateWithoutIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity test := testUtils.TestCase{ @@ -241,7 +241,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -342,7 +342,7 @@ func TestACP_CreateWithIdentityAndUpdateWithIdentity_CanUpdate(t *testing.T) { func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity test := testUtils.TestCase{ @@ -357,7 +357,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing. Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -458,9 +458,9 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentity_CanNotUpdate(t *testing. func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity - WrongIdentity := Actor2Signature + WrongIdentity := Actor2Identity test := testUtils.TestCase{ @@ -475,7 +475,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -580,7 +580,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentity_CanNotUpdate(t *testin func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. 
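// For the CLI-backed test client, the identity placed on the context is
// forwarded to the command as a flag; the pattern, as added to
// tests/clients/cli/wrapper_cli.go earlier in this diff:
//
//	id := db.GetContextIdentity(ctx)
//	if id.HasValue() {
//		args = append(args, "--identity", id.Value().String())
//	}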
- OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity test := testUtils.TestCase{ @@ -594,7 +594,7 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testi Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource @@ -695,9 +695,9 @@ func TestACP_CreateWithIdentityAndUpdateWithoutIdentityGQL_CanNotUpdate(t *testi func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *testing.T) { // OwnerIdentity should be the same identity that is used to do the registering/creation, // and the final read check to see the state of that registered document. - OwnerIdentity := Actor1Signature + OwnerIdentity := Actor1Identity - WrongIdentity := Actor2Signature + WrongIdentity := Actor2Identity test := testUtils.TestCase{ @@ -711,7 +711,7 @@ func TestACP_CreateWithIdentityAndUpdateWithWrongIdentityGQL_CanNotUpdate(t *tes Actions: []any{ testUtils.AddPolicy{ - Creator: OwnerIdentity, + Identity: OwnerIdentity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go index 2e08739176..cfc668a25c 100644 --- a/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_basic_dpi_fmts_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_BasicYAML_SchemaAccepted(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a basic policy that satisfies minimum DPI requirements @@ -122,7 +122,7 @@ func TestACP_AddDPISchema_BasicJSON_SchemaAccepted(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` { diff --git a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go index 5e0bdf5b3c..348736a58c 100644 --- a/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_extra_permissions_on_dpi_test.go @@ -29,7 +29,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelation_AcceptSchema(t *t testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -128,7 +128,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingRequiredRelationInTheEnd_AcceptSch testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -227,7 +227,7 @@ func TestACP_AddDPISchema_WithExtraPermsHavingNoRequiredRelation_AcceptSchema(t testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go index 01c674426e..72f622201f 100644 --- a/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_managed_relation_on_dpi_test.go @@ -29,7 +29,7 @@ func TestACP_AddDPISchema_WithManagedRelation_AcceptSchemas(t *testing.T) { testUtils.AddPolicy{ - Creator: actor1Signature, + 
Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go index 666f4264ee..a991d4b280 100644 --- a/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_mixed_resources_on_partial_dpi_test.go @@ -29,7 +29,7 @@ func TestACP_AddDPISchema_PartialValidDPIButUseOnlyValidDPIResource_AcceptSchema testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Partially Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go index 19181ec1a1..0ec43706ee 100644 --- a/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_multi_dpis_test.go @@ -53,7 +53,7 @@ func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t Actions: []any{ testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: validDPIUsedByBoth, @@ -62,7 +62,7 @@ func TestACP_AddDPISchema_AddDuplicateDPIsByOtherCreatorsUseBoth_AcceptSchema(t testUtils.AddPolicy{ - Creator: actor2Signature, + Identity: actor2Identity, Policy: validDPIUsedByBoth, diff --git a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go index d8c259d57c..9903bc18d2 100644 --- a/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_multi_resources_on_dpi_test.go @@ -29,7 +29,7 @@ func TestACP_AddDPISchema_WithMultipleResources_AcceptSchema(t *testing.T) { testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -138,7 +138,7 @@ func TestACP_AddDPISchema_WithMultipleResourcesBothBeingUsed_AcceptSchema(t *tes testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go index 89aab30d6f..086a69a1b1 100644 --- a/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go +++ b/tests/integration/acp/schema/add_dpi/accept_same_resource_on_diff_schemas_test.go @@ -30,7 +30,7 @@ func TestACP_AddDPISchema_UseSameResourceOnDifferentSchemas_AcceptSchemas(t *tes testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/fixture.go b/tests/integration/acp/schema/add_dpi/fixture.go index 0202cddeaa..3b3c83da6f 100644 --- a/tests/integration/acp/schema/add_dpi/fixture.go +++ b/tests/integration/acp/schema/add_dpi/fixture.go @@ -14,5 +14,5 @@ import ( acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" ) -var actor1Signature = acpUtils.Actor1Signature -var actor2Signature = acpUtils.Actor2Signature +var actor1Identity = acpUtils.Actor1Identity +var actor2Identity = acpUtils.Actor2Identity diff --git a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go 
b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go index ea4bd2476f..b23f47e19b 100644 --- a/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_empty_arg_on_schema_test.go @@ -27,7 +27,7 @@ func TestACP_AddDPISchema_NoArgWasSpecifiedOnSchema_SchemaRejected(t *testing.T) testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -101,7 +101,7 @@ func TestACP_AddDPISchema_SpecifiedArgsAreEmptyOnSchema_SchemaRejected(t *testin testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go index a94930424e..94b3fd2dde 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_InvalidPolicyIDArgTypeWasSpecifiedOnSchema_SchemaRejec testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -102,7 +102,7 @@ func TestACP_AddDPISchema_InvalidResourceArgTypeWasSpecifiedOnSchema_SchemaRejec testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go index 05dc4b8b9c..540222d37b 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_on_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionOnDPI_SchemaRejected testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -112,7 +112,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredReadPermissionLabelOnDPI_SchemaRej testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -195,7 +195,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionExprOnDPI_Sch testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -279,7 +279,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnReadPermissionNoSpaceExprOn testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -363,7 +363,7 @@ func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnReadPermissionExprOnDPI_Schem testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go index 4c16683551..29ec5a9ecf 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_read_perm_symbol_on_dpi_test.go @@ -28,7 +28,7 @@ func 
TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnReadPermissionExprOn testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -113,7 +113,7 @@ func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnReadPermissionExpr testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -198,7 +198,7 @@ func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnReadPermissionExprOnDPI testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go index def7044bf8..f3b5877444 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_on_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionOnDPI_SchemaRejecte testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -112,7 +112,7 @@ func TestACP_AddDPISchema_OwnerMissingRequiredWritePermissionLabelOnDPI_SchemaRe testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -195,7 +195,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionExprOnDPI_Sc testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -279,7 +279,7 @@ func TestACP_AddDPISchema_OwnerSpecifiedIncorrectlyOnWritePermissionNoSpaceExprO testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -363,7 +363,7 @@ func TestACP_AddDPISchema_MaliciousOwnerSpecifiedOnWritePermissionExprOnDPI_Sche testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go index 56712afd9f..96ff618123 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_owner_write_perm_symbol_on_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_OwnerRelationWithDifferenceSetOpOnWritePermissionExprO testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -113,7 +113,7 @@ func TestACP_AddDPISchema_OwnerRelationWithIntersectionSetOpOnWritePermissionExp testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy @@ -198,7 +198,7 @@ func TestACP_AddDPISchema_OwnerRelationWithInvalidSetOpOnWritePermissionExprOnDP testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: a policy diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go index dc513f9827..c59008edf5 100644 --- a/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_missing_dpi_test.go @@ -79,7 +79,7 @@ func TestACP_AddDPISchema_WhereAPolicyWasAddedButLinkedPolicyWasNotAdded_SchemaR testUtils.AddPolicy{ - 
Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go index 5b72fcea96..7a5942f2e6 100644 --- a/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_missing_id_arg_on_schema_test.go @@ -27,7 +27,7 @@ func TestACP_AddDPISchema_NoPolicyIDWasSpecifiedOnSchema_SchemaRejected(t *testi testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -101,7 +101,7 @@ func TestACP_AddDPISchema_SpecifiedPolicyIDArgIsEmptyOnSchema_SchemaRejected(t * testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go index 1a9b1635bc..16c6eb1024 100644 --- a/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_missing_perms_on_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_MissingRequiredReadPermissionOnDPI_SchemaRejected(t *t testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A policy diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go index a8296f80ca..45635eae15 100644 --- a/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_arg_on_schema_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_NoResourceWasSpecifiedOnSchema_SchemaRejected(t *testi testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) @@ -104,7 +104,7 @@ func TestACP_AddDPISchema_SpecifiedResourceArgIsEmptyOnSchema_SchemaRejected(t * testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go index edcbe9136a..3d50f1c2a7 100644 --- a/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_missing_resource_on_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_SpecifiedResourceDoesNotExistOnDPI_SchemaRejected(t *t testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: actor1Identity, Policy: ` description: A Valid Defra Policy Interface (DPI) diff --git a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go index b99c254497..ba9e06a2b6 100644 --- a/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_mixed_resources_on_partial_dpi_test.go @@ -28,7 +28,7 @@ func TestACP_AddDPISchema_PartialValidDPIButUseInValidDPIResource_RejectSchema(t testUtils.AddPolicy{ - Creator: actor1Signature, + Identity: 
actor1Identity, Policy: ` description: A Partially Valid Defra Policy Interface (DPI) diff --git a/tests/integration/collection/update/simple/with_doc_id_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go index 7badb3b66c..cea7117682 100644 --- a/tests/integration/collection/update/simple/with_doc_id_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ -16,7 +16,6 @@ import ( "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -44,7 +43,6 @@ func TestUpdateWithDocID(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocID( ctx, - acpIdentity.NoIdentity, doc.ID(), `{name: "Eric"}`, ) @@ -62,7 +60,7 @@ func TestUpdateWithDocID(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithDocID(ctx, acpIdentity.NoIdentity, doc.ID(), `"name: Eric"`) + _, err := c.UpdateWithDocID(ctx, doc.ID(), `"name: Eric"`) return err }, }, @@ -79,7 +77,7 @@ func TestUpdateWithDocID(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocID( ctx, - acpIdentity.NoIdentity, + doc.ID(), `[ { @@ -93,7 +91,7 @@ func TestUpdateWithDocID(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -120,7 +118,7 @@ func TestUpdateWithDocID(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocID( ctx, - acpIdentity.NoIdentity, + doc.ID(), `{"name": "Eric"}`, ) @@ -128,7 +126,7 @@ func TestUpdateWithDocID(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_doc_ids_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go index d1b38843a5..2469eeee56 100644 --- a/tests/integration/collection/update/simple/with_doc_ids_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -16,7 +16,6 @@ import ( "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -57,7 +56,6 @@ func TestUpdateWithDocIDs(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocIDs( ctx, - acpIdentity.NoIdentity, []client.DocID{doc1.ID(), doc2.ID()}, `{name: "Eric"}`, ) @@ -80,7 +78,6 @@ func TestUpdateWithDocIDs(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocIDs( ctx, - acpIdentity.NoIdentity, []client.DocID{doc1.ID(), doc2.ID()}, `"name: Eric"`, ) @@ -103,7 +100,6 @@ func TestUpdateWithDocIDs(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocIDs( ctx, - acpIdentity.NoIdentity, []client.DocID{doc1.ID(), doc2.ID()}, `[ { @@ -117,7 +113,7 @@ func TestUpdateWithDocIDs(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc1.ID(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -129,7 +125,7 @@ func TestUpdateWithDocIDs(t *testing.T) { assert.Equal(t, "John", name) - d2, err := c.Get(ctx, acpIdentity.NoIdentity, doc2.ID(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } @@ -159,7 +155,7 @@ func 
TestUpdateWithDocIDs(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithDocIDs( ctx, - acpIdentity.NoIdentity, + []client.DocID{doc1.ID(), doc2.ID()}, `{"age": 40}`, ) @@ -167,7 +163,7 @@ func TestUpdateWithDocIDs(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc1.ID(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -179,7 +175,7 @@ func TestUpdateWithDocIDs(t *testing.T) { assert.Equal(t, int64(40), name) - d2, err := c.Get(ctx, acpIdentity.NoIdentity, doc2.ID(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index 54f5b918ac..bbcfc5b8bc 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -16,7 +16,6 @@ import ( "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) @@ -32,7 +31,6 @@ func TestUpdateWithInvalidFilterType(t *testing.T) { // test with an invalid filter type _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, t, `{"name": "Eric"}`, ) @@ -57,7 +55,6 @@ func TestUpdateWithEmptyFilter(t *testing.T) { // test with an empty filter _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, "", `{"name": "Eric"}`, ) @@ -96,7 +93,6 @@ func TestUpdateWithFilter(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, filter, `{name: "Eric"}`, ) @@ -116,7 +112,6 @@ func TestUpdateWithFilter(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, filter, `"name: Eric"`, ) @@ -136,7 +131,6 @@ func TestUpdateWithFilter(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, filter, `[ { @@ -150,7 +144,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -177,7 +171,6 @@ func TestUpdateWithFilter(t *testing.T) { ctx := context.Background() _, err := c.UpdateWithFilter( ctx, - acpIdentity.NoIdentity, filter, `{"name": "Eric"}`, ) @@ -185,7 +178,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, acpIdentity.NoIdentity, doc.ID(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/utils.go b/tests/integration/collection/utils.go index eb053be594..b8bf1cf46b 100644 --- a/tests/integration/collection/utils.go +++ b/tests/integration/collection/utils.go @@ -17,7 +17,6 @@ import ( "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -91,7 +90,7 @@ func setupDatabase( if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } - err = col.Save(ctx, acpIdentity.NoIdentity, doc) + err = col.Save(ctx, doc) if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } diff --git a/tests/integration/collection_description/updates/remove/policy_test.go 
b/tests/integration/collection_description/updates/remove/policy_test.go index 8fdff5eb8e..1d4de5d399 100644 --- a/tests/integration/collection_description/updates/remove/policy_test.go +++ b/tests/integration/collection_description/updates/remove/policy_test.go @@ -22,7 +22,7 @@ func TestColDescrUpdateRemovePolicy_Errors(t *testing.T) { Actions: []any{ testUtils.AddPolicy{ - Creator: acpUtils.Actor1Signature, + Identity: acpUtils.Actor1Identity, Policy: ` description: a test policy which marks a collection in a database as a resource diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index 1e75687a4e..ec5c174106 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -49,11 +48,11 @@ func TestEventsSimpleWithCreate(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) + err = c.Save(context.Background(), doc1) assert.Nil(t, err) }, func(c client.Collection) { - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc2) + err = c.Save(context.Background(), doc2) assert.Nil(t, err) }, }, diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index f90fc96a88..7d7238b546 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" @@ -29,7 +28,6 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { func(ctx context.Context, d client.DB) { r := d.ExecRequest( ctx, - acpIdentity.NoIdentity, `mutation { create_Users(input: {name: "John"}) { _docID @@ -47,7 +45,6 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { ctx = db.SetContextTxn(ctx, txn) r := d.ExecRequest( ctx, - acpIdentity.NoIdentity, `mutation { create_Users(input: {name: "Shahzad"}) { _docID diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index 00f5a5977f..b02b2505e1 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -38,11 +37,11 @@ func TestEventsSimpleWithDelete(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) + err = c.Save(context.Background(), doc1) assert.Nil(t, err) }, func(c client.Collection) { - wasDeleted, err := 
c.Delete(context.Background(), acpIdentity.NoIdentity, doc1.ID()) + wasDeleted, err := c.Delete(context.Background(), doc1.ID()) assert.Nil(t, err) assert.True(t, wasDeleted) }, diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 26c2e4363a..723421f91b 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration/events" ) @@ -49,17 +48,17 @@ func TestEventsSimpleWithUpdate(t *testing.T) { CollectionCalls: map[string][]func(client.Collection){ "Users": []func(c client.Collection){ func(c client.Collection) { - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) + err = c.Save(context.Background(), doc1) assert.Nil(t, err) }, func(c client.Collection) { - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc2) + err = c.Save(context.Background(), doc2) assert.Nil(t, err) }, func(c client.Collection) { // Update John doc1.Set("name", "Johnnnnn") - err = c.Save(context.Background(), acpIdentity.NoIdentity, doc1) + err = c.Save(context.Background(), doc1) assert.Nil(t, err) }, }, diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index 51299192e8..d2bf418294 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -19,7 +19,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/db" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -153,7 +152,7 @@ func setupDatabase( doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) require.NoError(t, err) - err = col.Save(ctx, acpIdentity.NoIdentity, doc) + err = col.Save(ctx, doc) require.NoError(t, err) } } diff --git a/tests/integration/explain.go b/tests/integration/explain.go index c087401588..da2adb69e5 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -20,7 +20,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" ) @@ -133,7 +132,6 @@ func executeExplainRequest( for _, node := range getNodes(action.NodeID, s.nodes) { result := node.ExecRequest( s.ctx, - acpIdentity.NewIdentity(action.Identity), action.Request, ) assertExplainRequestResults(s, &result.GQL, action) diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 2727690281..e744c4735d 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -19,9 +19,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/sourcenetwork/immutable" - - acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" coreDB "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" @@ -77,7 +74,6 @@ type P2PTestCase struct { func setupDefraNode( t *testing.T, - identity immutable.Option[string], opts []net.NodeOpt, peers []string, seeds []string, @@ -97,7 +93,7 @@ func setupDefraNode( // seed 
the database with a set of documents docIDs := []client.DocID{} for _, document := range seeds { - docID, err := seedDocument(ctx, identity, db, document) + docID, err := seedDocument(ctx, db, document) require.NoError(t, err) docIDs = append(docIDs, docID) } @@ -136,7 +132,6 @@ func seedSchema(ctx context.Context, db client.DB) error { func seedDocument( ctx context.Context, - identity immutable.Option[string], db client.DB, document string, ) (client.DocID, error) { @@ -150,7 +145,7 @@ func seedDocument( return client.DocID{}, err } - err = col.Save(ctx, identity, doc) + err = col.Save(ctx, doc) if err != nil { return client.DocID{}, err } @@ -160,7 +155,6 @@ func seedDocument( func saveDocument( ctx context.Context, - identity immutable.Option[string], db client.DB, document *client.Document, ) error { @@ -169,12 +163,11 @@ func saveDocument( return err } - return col.Save(ctx, identity, document) + return col.Save(ctx, document) } func updateDocument( ctx context.Context, - identity immutable.Option[string], db client.DB, docID client.DocID, update string, @@ -184,7 +177,7 @@ func updateDocument( return err } - doc, err := getDocument(ctx, identity, db, docID) + doc, err := getDocument(ctx, db, docID) if err != nil { return err } @@ -193,12 +186,11 @@ func updateDocument( return err } - return col.Save(ctx, identity, doc) + return col.Save(ctx, doc) } func getDocument( ctx context.Context, - identity immutable.Option[string], db client.DB, docID client.DocID, ) (*client.Document, error) { @@ -207,7 +199,7 @@ func getDocument( return nil, err } - doc, err := col.Get(ctx, identity, docID, false) + doc, err := col.Get(ctx, docID, false) if err != nil { return nil, err } @@ -238,7 +230,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { } n, d, err := setupDefraNode( t, - acpIdentity.NewIdentity(test.Identity), cfg, peerAddresses, test.SeedDocuments, @@ -270,8 +261,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { } } - identity := acpIdentity.NewIdentity(test.Identity) - // update and sync peers for n, updateMap := range test.Updates { if n >= len(nodes) { @@ -284,7 +273,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { log.InfoContext(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) err := updateDocument( ctx, - identity, nodes[n].DB, docIDs[d], update, @@ -318,7 +306,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for field, result := range results { doc, err := getDocument( ctx, - identity, nodes[n2].DB, docIDs[d], ) @@ -355,7 +342,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for _, doc := range test.DocumentsToReplicate { err := saveDocument( ctx, - identity, nodes[n].DB, doc, ) @@ -374,7 +360,6 @@ func executeTestCase(t *testing.T, test P2PTestCase) { doc, err := getDocument( ctx, - identity, nodes[rep].DB, d, ) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index deb38acde3..b699ed9f7f 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -848,14 +848,10 @@ func refreshDocuments( continue } + ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity)) // The document may have been mutated by other actions, so to be sure we have the latest // version without having to worry about the individual update mechanics we fetch it. 
- doc, err = collection.Get( - s.ctx, - acpIdentity.NewIdentity(action.Identity), - doc.ID(), - false, - ) + doc, err = collection.Get(ctx, doc.ID(), false) if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway @@ -1204,11 +1200,12 @@ func createDocViaColSave( return nil, err } - return doc, collections[action.CollectionID].Save( - s.ctx, - acpIdentity.NewIdentity(action.Identity), - doc, - ) + txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) + + ctx := db.SetContextTxn(s.ctx, txn) + ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity)) + + return doc, collections[action.CollectionID].Save(ctx, doc) } func createDocViaColCreate( @@ -1223,11 +1220,12 @@ func createDocViaColCreate( return nil, err } - return doc, collections[action.CollectionID].Create( - s.ctx, - acpIdentity.NewIdentity(action.Identity), - doc, - ) + txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) + + ctx := db.SetContextTxn(s.ctx, txn) + ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity)) + + return doc, collections[action.CollectionID].Create(ctx, doc) } func createDocViaGQL( @@ -1253,11 +1251,11 @@ func createDocViaGQL( txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) - identity := acpIdentity.NewIdentity(action.Identity) ctx := db.SetContextTxn(s.ctx, txn) + ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity)) + result := node.ExecRequest( ctx, - identity, request, ) if len(result.GQL.Errors) > 0 { @@ -1273,7 +1271,7 @@ func createDocViaGQL( docID, err := client.NewDocIDFromString(docIDString) require.NoError(s.t, err) - doc, err := collection.Get(s.ctx, identity, docID, false) + doc, err := collection.Get(ctx, docID, false) require.NoError(s.t, err) return doc, nil @@ -1286,6 +1284,7 @@ func deleteDoc( action DeleteDoc, ) { doc := s.documents[action.CollectionID][action.DocID] + ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity)) var expectedErrorRaised bool actionNodes := getNodes(action.NodeID, s.nodes) @@ -1294,11 +1293,7 @@ func deleteDoc( actionNodes, nodeID, func() error { - _, err := collections[action.CollectionID].DeleteWithDocID( - s.ctx, - acpIdentity.NewIdentity(action.Identity), - doc.ID(), - ) + _, err := collections[action.CollectionID].DeleteWithDocID(ctx, doc.ID()) return err }, ) @@ -1347,14 +1342,9 @@ func updateDocViaColSave( collections []client.Collection, ) error { cachedDoc := s.documents[action.CollectionID][action.DocID] + ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity)) - identity := acpIdentity.NewIdentity(action.Identity) - doc, err := collections[action.CollectionID].Get( - s.ctx, - identity, - cachedDoc.ID(), - true, - ) + doc, err := collections[action.CollectionID].Get(ctx, cachedDoc.ID(), true) if err != nil { return err } @@ -1367,8 +1357,7 @@ func updateDocViaColSave( s.documents[action.CollectionID][action.DocID] = doc return collections[action.CollectionID].Save( - s.ctx, - identity, + ctx, doc, ) } @@ -1380,14 +1369,9 @@ func updateDocViaColUpdate( collections []client.Collection, ) error { cachedDoc := s.documents[action.CollectionID][action.DocID] + ctx := db.SetContextIdentity(s.ctx, acpIdentity.New(action.Identity)) - identity := acpIdentity.NewIdentity(action.Identity) - doc, err := collections[action.CollectionID].Get( - s.ctx, - identity, - cachedDoc.ID(), - true, - ) + doc, err := collections[action.CollectionID].Get(ctx, cachedDoc.ID(), true) 
if err != nil { return err } @@ -1399,11 +1383,7 @@ func updateDocViaColUpdate( s.documents[action.CollectionID][action.DocID] = doc - return collections[action.CollectionID].Update( - s.ctx, - identity, - doc, - ) + return collections[action.CollectionID].Update(ctx, doc) } func updateDocViaGQL( @@ -1430,12 +1410,11 @@ func updateDocViaGQL( ) txn := getTransaction(s, node, immutable.None[int](), action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) - result := node.ExecRequest( - ctx, - acpIdentity.NewIdentity(action.Identity), - request, - ) + ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity)) + + result := node.ExecRequest(ctx, request) if len(result.GQL.Errors) > 0 { return result.GQL.Errors[0] } @@ -1651,12 +1630,11 @@ func executeRequest( var expectedErrorRaised bool for nodeID, node := range getNodes(action.NodeID, s.nodes) { txn := getTransaction(s, node, action.TransactionID, action.ExpectedError) + ctx := db.SetContextTxn(s.ctx, txn) - result := node.ExecRequest( - ctx, - acpIdentity.NewIdentity(action.Identity), - action.Request, - ) + ctx = db.SetContextIdentity(ctx, acpIdentity.New(action.Identity)) + + result := node.ExecRequest(ctx, action.Request) anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( @@ -1688,11 +1666,7 @@ func executeSubscriptionRequest( subscriptionAssert := make(chan func()) for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest( - s.ctx, - acpIdentity.NoIdentity, // No Identity for subscription request. - action.Request, - ) + result := node.ExecRequest(s.ctx, action.Request) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return } @@ -1874,11 +1848,7 @@ func assertIntrospectionResults( action IntrospectionRequest, ) bool { for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest( - s.ctx, - acpIdentity.NoIdentity, // No Identity for introspection requests. - action.Request, - ) + result := node.ExecRequest(s.ctx, action.Request) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return true @@ -1909,11 +1879,7 @@ func assertClientIntrospectionResults( action ClientIntrospectionRequest, ) bool { for _, node := range getNodes(action.NodeID, s.nodes) { - result := node.ExecRequest( - s.ctx, - acpIdentity.NoIdentity, // No identity for client introspection requests. - action.Request, - ) + result := node.ExecRequest(s.ctx, action.Request) if AssertErrors(s.t, s.testCase.Description, result.GQL.Errors, action.ExpectedError) { return true From 4a75f23e6b6c25dcc2c3d717124db25779dba362 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 19 Apr 2024 15:46:42 -0400 Subject: [PATCH 32/49] feat: Move relation field properties onto collection (#2529) ## Relevant issue(s) Resolves #2451 #1911 #2408 ## Description Moves relation field properties onto collection description from schema description. This allows for one-sided relations to be defined via `PatchSchema`. There is currently no way to create them using an SDL, but we could add a directive or something at some point if we want. As the adding of fields via `PatchCollection` has not yet been enabled, this does prevent users from adding secondary relation fields to an existing schema. I'll create a ticket before merging this to allow this; it is my strong preference not to do that in this already large and fiddly PR though.
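To make the impact on callers concrete, here is a minimal sketch (not taken from this diff; it assumes a `User` collection with `name` and `age` fields already exists, and elides node setup) of document construction after this change, previewing the `client.Document` point in the next paragraph:

```go
import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

// createUser sketches the new constructor flow: documents are now built
// against the full collection definition (local plus global fields),
// rather than against the schema alone.
func createUser(ctx context.Context, db client.DB) error {
	col, err := db.GetCollectionByName(ctx, "User")
	if err != nil {
		return err
	}

	// Before this change this call took col.Schema() instead of col.Definition().
	doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition())
	if err != nil {
		return err
	}

	return col.Create(ctx, doc)
}
```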
As the internal codebase relies on the setting of secondary fields via `client.Document`, `client.Document` now requires a collection, not just the schema, when being constructed. We will likely want to find a way to avoid that in the future. This PR also moves some validation from the graphql package into `db`; not all of it has been moved, but that is a long-term wish of mine. The `db` package validation can and should be improved further to make rule reuse across `PatchCollection`, `PatchSchema`, and `CreateCollection` (SDL) easier; however, I would rather not spend too much effort on that in this PR. This includes moving it out of the `collection.go` file. It might resolve https://github.com/sourcenetwork/defradb/issues/2380, but I'd rather not bring that into scope. If I'm waiting around for reviews I might verify that here though. This should conclude the transfer of local properties off of the schema object :) --- acp/README.md | 2 +- cli/collection_create.go | 4 +- client/collection_description.go | 17 +- client/collection_field_description.go | 51 +- client/definitions.go | 94 ++- client/document.go | 36 +- client/document_test.go | 32 +- client/errors.go | 24 - client/schema_description.go | 9 +- client/schema_field_description.go | 79 +- db/backup.go | 6 +- db/backup_test.go | 32 +- db/collection.go | 254 +++++-- db/collection_get.go | 2 +- db/collection_index.go | 2 +- db/collection_update.go | 6 +- db/errors.go | 44 +- db/fetcher/encoded_doc.go | 4 +- db/indexed_docs_test.go | 16 +- db/schema.go | 2 +- db/view.go | 2 +- .../i2451-rel-field-props-local.md | 3 + http/client_collection.go | 2 +- http/handler_ccip_test.go | 2 +- http/handler_collection.go | 4 +- net/client_test.go | 30 +- net/dag_test.go | 6 +- net/peer_test.go | 18 +- net/server_test.go | 6 +- planner/create.go | 2 +- planner/mapper/mapper.go | 6 +- planner/type_join.go | 58 +- request/graphql/schema/collection.go | 302 +++++--- request/graphql/schema/descriptions_test.go | 335 ++++---- request/graphql/schema/errors.go | 24 +- request/graphql/schema/generate.go | 5 +- request/graphql/schema/relations.go | 165 ---- tests/bench/bench_util.go | 2 +- tests/bench/collection/utils.go | 6 +- tests/clients/cli/wrapper_collection.go | 2 +- tests/gen/gen_auto.go | 10 +- tests/gen/gen_auto_config.go | 9 +- tests/gen/gen_auto_configurator.go | 12 +- tests/gen/gen_auto_test.go | 74 +- .../update/simple/with_doc_id_test.go | 2 +- .../update/simple/with_doc_ids_test.go | 4 +- .../update/simple/with_filter_test.go | 2 +- tests/integration/collection/utils.go | 2 +- .../events/simple/with_create_test.go | 4 +- .../events/simple/with_delete_test.go | 2 +- .../events/simple/with_update_test.go | 8 +- tests/integration/events/utils.go | 2 +- .../explain/default/type_join_many_test.go | 4 +- .../explain/default/type_join_one_test.go | 8 +- .../explain/default/type_join_test.go | 8 +- .../explain/default/with_average_join_test.go | 8 +- .../explain/default/with_count_join_test.go | 8 +- .../explain/default/with_sum_join_test.go | 10 +- .../mutation/create/with_version_test.go | 2 +- .../one_to_many/with_show_deleted_test.go | 4 +- .../one_to_many/with_alias_test.go | 4 +- tests/integration/net/order/tcp_test.go | 2 +- tests/integration/net/order/utils.go | 2 +- .../peer/subscribe/with_add_get_test.go | 2 +- .../simple/replicator/with_create_test.go | 4 +- .../integration/query/commits/simple_test.go | 50 +- .../query/commits/with_cid_test.go | 8 +- .../query/commits/with_depth_test.go | 34 +- .../query/commits/with_doc_id_cid_test.go | 4 +-
.../query/commits/with_doc_id_count_test.go | 6 +- .../query/commits/with_doc_id_field_test.go | 4 +- .../commits/with_doc_id_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_limit_test.go | 4 +- .../with_doc_id_order_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_order_test.go | 66 +- .../query/commits/with_doc_id_test.go | 46 +- .../commits/with_doc_id_typename_test.go | 6 +- .../query/commits/with_field_test.go | 8 +- .../query/commits/with_group_test.go | 16 +- .../latest_commits/with_doc_id_field_test.go | 8 +- .../query/latest_commits/with_doc_id_test.go | 10 +- .../query/one_to_many/with_cid_doc_id_test.go | 8 +- .../query/one_to_many/with_id_field_test.go | 41 +- .../one_to_one/with_clashing_id_field_test.go | 17 +- .../query/simple/with_cid_doc_id_test.go | 20 +- .../query/simple/with_version_test.go | 38 +- tests/integration/schema/crdt_type_test.go | 8 +- tests/integration/schema/get_schema_test.go | 14 +- .../schema/migrations/query/simple_test.go | 68 +- .../migrations/query/with_doc_id_test.go | 8 +- .../migrations/query/with_inverse_test.go | 10 +- .../query/with_p2p_schema_branch_test.go | 4 +- .../schema/migrations/query/with_p2p_test.go | 20 +- .../migrations/query/with_restart_test.go | 8 +- .../query/with_schema_branch_test.go | 2 +- .../migrations/query/with_set_default_test.go | 10 +- .../schema/migrations/query/with_txn_test.go | 8 +- .../migrations/query/with_update_test.go | 8 +- .../schema/migrations/simple_test.go | 8 +- tests/integration/schema/one_one_test.go | 2 +- tests/integration/schema/relations_test.go | 40 +- tests/integration/schema/simple_test.go | 4 +- .../updates/add/field/create_update_test.go | 8 +- .../field/kind/foreign_object_array_test.go | 718 +----------------- .../add/field/kind/foreign_object_test.go | 326 +------- .../schema/updates/add/field/simple_test.go | 8 +- .../schema/updates/move/simple_test.go | 2 +- .../updates/remove/fields/simple_test.go | 29 - .../schema/updates/test/field/simple_test.go | 4 +- .../schema/updates/with_schema_branch_test.go | 30 +- .../schema/with_update_set_default_test.go | 4 +- tests/integration/utils2.go | 6 +- .../view/one_to_many/simple_test.go | 45 +- tests/predefined/gen_predefined.go | 14 +- tests/predefined/gen_predefined_test.go | 44 +- tests/predefined/util_test.go | 12 +- 116 files changed, 1474 insertions(+), 2293 deletions(-) create mode 100644 docs/data_format_changes/i2451-rel-field-props-local.md delete mode 100644 request/graphql/schema/relations.go diff --git a/acp/README.md b/acp/README.md index 3fb49968f8..697a60a0c2 100644 --- a/acp/README.md +++ b/acp/README.md @@ -212,7 +212,7 @@ Result: "Name": "Users", "ID": 1, "RootID": 1, - "SchemaVersionID": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + "SchemaVersionID": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", "Sources": [], "Fields": [ { diff --git a/cli/collection_create.go b/cli/collection_create.go index c61a286326..df7d8794b5 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -69,14 +69,14 @@ Example: create from stdin: } if client.IsJSONArray(docData) { - docs, err := client.NewDocsFromJSON(docData, col.Schema()) + docs, err := client.NewDocsFromJSON(docData, col.Definition()) if err != nil { return err } return col.CreateMany(cmd.Context(), docs) } - doc, err := client.NewDocFromJSON(docData, col.Schema()) + doc, err := client.NewDocFromJSON(docData, col.Definition()) if err != nil { return err } diff --git a/client/collection_description.go 
b/client/collection_description.go index 2db34ddb8b..aa22bf7121 100644 --- a/client/collection_description.go +++ b/client/collection_description.go @@ -60,7 +60,11 @@ type CollectionDescription struct { // - [CollectionSource] Sources []any - // Fields contains the fields within this Collection. + // Fields contains the fields local to the node within this Collection. + // + // Most fields defined here will also be present on the [SchemaDescription]. A notable + // exception to this are the fields of the (optional) secondary side of a relation + // which are local only, and will not be present on the [SchemaDescription]. Fields []CollectionFieldDescription // Indexes contains the secondary indexes that this Collection has. @@ -136,16 +140,15 @@ func (col CollectionDescription) GetFieldByRelation( relationName string, otherCollectionName string, otherFieldName string, - schema *SchemaDescription, -) (SchemaFieldDescription, bool) { - for _, field := range schema.Fields { - if field.RelationName == relationName && +) (CollectionFieldDescription, bool) { + for _, field := range col.Fields { + if field.RelationName.Value() == relationName && !(col.Name.Value() == otherCollectionName && otherFieldName == field.Name) && - field.Kind != FieldKind_DocID { + field.Kind.Value() != FieldKind_DocID { return field, true } } - return SchemaFieldDescription{}, false + return CollectionFieldDescription{}, false } // QuerySources returns all the Sources of type [QuerySource] diff --git a/client/collection_field_description.go b/client/collection_field_description.go index 048cde24c0..98b012d641 100644 --- a/client/collection_field_description.go +++ b/client/collection_field_description.go @@ -10,7 +10,12 @@ package client -import "fmt" +import ( + "encoding/json" + "fmt" + + "github.com/sourcenetwork/immutable" +) // FieldID is a unique identifier for a field in a schema. type FieldID uint32 @@ -22,8 +27,52 @@ type CollectionFieldDescription struct { // ID contains the local, internal ID of this field. ID FieldID + + // Kind contains the local field kind if this is a local-only field (e.g. the secondary + // side of a relation). + // + // If the field is globaly defined (on the Schema), this will be [None]. + Kind immutable.Option[FieldKind] + + // RelationName contains the name of this relation, if this field is part of a relationship. + // + // Otherwise will be [None]. + RelationName immutable.Option[string] } func (f FieldID) String() string { return fmt.Sprint(uint32(f)) } + +// collectionFieldDescription is a private type used to facilitate the unmarshalling +// of json to a [CollectionFieldDescription]. 
+type collectionFieldDescription struct { + Name string + ID FieldID + RelationName immutable.Option[string] + + // Properties below this line are unmarshalled using custom logic in [UnmarshalJSON] + Kind json.RawMessage +} + +func (f *CollectionFieldDescription) UnmarshalJSON(bytes []byte) error { + var descMap collectionFieldDescription + err := json.Unmarshal(bytes, &descMap) + if err != nil { + return err + } + + f.Name = descMap.Name + f.ID = descMap.ID + f.RelationName = descMap.RelationName + kind, err := parseFieldKind(descMap.Kind) + if err != nil { + return err + } + + if kind != FieldKind_None { + f.Kind = immutable.Some(kind) + } + + return nil +} diff --git a/client/definitions.go b/client/definitions.go index a8ee52d5af..c04159f679 100644 --- a/client/definitions.go +++ b/client/definitions.go @@ -25,16 +25,28 @@ type CollectionDefinition struct { // GetFieldByName returns the field for the given field name. If such a field is found it // will return it and true, if it is not found it will return false. func (def CollectionDefinition) GetFieldByName(fieldName string) (FieldDefinition, bool) { - collectionField, ok := def.Description.GetFieldByName(fieldName) - if ok { - schemaField, ok := def.Schema.GetFieldByName(fieldName) - if ok { - return NewFieldDefinition( - collectionField, - schemaField, - ), true - } + collectionField, existsOnCollection := def.Description.GetFieldByName(fieldName) + schemaField, existsOnSchema := def.Schema.GetFieldByName(fieldName) + + if existsOnCollection && existsOnSchema { + return NewFieldDefinition( + collectionField, + schemaField, + ), true + } else if existsOnCollection && !existsOnSchema { + // If the field exists only on the collection, it is a local only field, for example the + // secondary side of a relation. + return NewLocalFieldDefinition( + collectionField, + ), true + } else if !existsOnCollection && existsOnSchema { + // If the field only exist on the schema it is likely that this is a schema-only object + // definition, for example for an embedded object. + return NewSchemaOnlyFieldDefinition( + schemaField, + ), true } + return FieldDefinition{}, false } @@ -42,6 +54,8 @@ func (def CollectionDefinition) GetFieldByName(fieldName string) (FieldDefinitio // as a single set. func (def CollectionDefinition) GetFields() []FieldDefinition { fields := []FieldDefinition{} + localFieldNames := map[string]struct{}{} + for _, localField := range def.Description.Fields { globalField, ok := def.Schema.GetFieldByName(localField.Name) if ok { @@ -49,11 +63,41 @@ func (def CollectionDefinition) GetFields() []FieldDefinition { fields, NewFieldDefinition(localField, globalField), ) + } else { + // This must be a local only field, for example the secondary side of a relation. + fields = append( + fields, + NewLocalFieldDefinition(localField), + ) + } + localFieldNames[localField.Name] = struct{}{} + } + + for _, schemaField := range def.Schema.Fields { + if _, ok := localFieldNames[schemaField.Name]; ok { + continue } + // This must be a global only field, for example on an embedded object. + fields = append( + fields, + NewSchemaOnlyFieldDefinition(schemaField), + ) } + return fields } +// GetName gets the name of this definition. +// +// If the collection description has a name (e.g. it is an active collection) it will return that, +// otherwise it will return the schema name. 
+func (def CollectionDefinition) GetName() string { + if def.Description.Name.HasValue() { + return def.Description.Name.Value() + } + return def.Schema.Name +} + // FieldDefinition describes the combined local and global set of properties that constitutes // a field on a collection. // @@ -94,13 +138,39 @@ type FieldDefinition struct { // NewFieldDefinition returns a new [FieldDefinition], combining the given local and global elements // into a single object. func NewFieldDefinition(local CollectionFieldDescription, global SchemaFieldDescription) FieldDefinition { + var kind FieldKind + if local.Kind.HasValue() { + kind = local.Kind.Value() + } else { + kind = global.Kind + } + return FieldDefinition{ Name: global.Name, ID: local.ID, - Kind: global.Kind, - RelationName: global.RelationName, + Kind: kind, + RelationName: local.RelationName.Value(), Typ: global.Typ, - IsPrimaryRelation: global.IsPrimaryRelation, + IsPrimaryRelation: kind.IsObject() && !kind.IsArray(), + } +} + +// NewLocalFieldDefinition returns a new [FieldDefinition] from the given local [CollectionFieldDescription]. +func NewLocalFieldDefinition(local CollectionFieldDescription) FieldDefinition { + return FieldDefinition{ + Name: local.Name, + ID: local.ID, + Kind: local.Kind.Value(), + RelationName: local.RelationName.Value(), + } +} + +// NewSchemaOnlyFieldDefinition returns a new [FieldDefinition] from the given global [SchemaFieldDescription]. +func NewSchemaOnlyFieldDefinition(global SchemaFieldDescription) FieldDefinition { + return FieldDefinition{ + Name: global.Name, + Kind: global.Kind, + Typ: global.Typ, } } diff --git a/client/document.go b/client/document.go index 531ccd42cd..2325328b13 100644 --- a/client/document.go +++ b/client/document.go @@ -66,28 +66,28 @@ type Document struct { // marks if document has unsaved changes isDirty bool - schemaDescription SchemaDescription + collectionDefinition CollectionDefinition } -func newEmptyDoc(sd SchemaDescription) *Document { +func newEmptyDoc(collectionDefinition CollectionDefinition) *Document { return &Document{ - fields: make(map[string]Field), - values: make(map[Field]*FieldValue), - schemaDescription: sd, + fields: make(map[string]Field), + values: make(map[Field]*FieldValue), + collectionDefinition: collectionDefinition, } } // NewDocWithID creates a new Document with a specified key. -func NewDocWithID(docID DocID, sd SchemaDescription) *Document { - doc := newEmptyDoc(sd) +func NewDocWithID(docID DocID, collectionDefinition CollectionDefinition) *Document { + doc := newEmptyDoc(collectionDefinition) doc.id = docID return doc } // NewDocFromMap creates a new Document from a data map. -func NewDocFromMap(data map[string]any, sd SchemaDescription) (*Document, error) { +func NewDocFromMap(data map[string]any, collectionDefinition CollectionDefinition) (*Document, error) { var err error - doc := newEmptyDoc(sd) + doc := newEmptyDoc(collectionDefinition) // check if document contains special _docID field k, hasDocID := data[request.DocIDFieldName] @@ -126,8 +126,8 @@ func IsJSONArray(obj []byte) bool { } // NewFromJSON creates a new instance of a Document from a raw JSON object byte array. 
-func NewDocFromJSON(obj []byte, sd SchemaDescription) (*Document, error) { - doc := newEmptyDoc(sd) +func NewDocFromJSON(obj []byte, collectionDefinition CollectionDefinition) (*Document, error) { + doc := newEmptyDoc(collectionDefinition) err := doc.SetWithJSON(obj) if err != nil { return nil, err @@ -141,7 +141,7 @@ func NewDocFromJSON(obj []byte, sd SchemaDescription) (*Document, error) { // ManyFromJSON creates a new slice of Documents from a raw JSON array byte array. // It will return an error if the given byte array is not a valid JSON array. -func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { +func NewDocsFromJSON(obj []byte, collectionDefinition CollectionDefinition) ([]*Document, error) { v, err := fastjson.ParseBytes(obj) if err != nil { return nil, err @@ -157,7 +157,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { if err != nil { return nil, err } - doc := newEmptyDoc(sd) + doc := newEmptyDoc(collectionDefinition) err = doc.setWithFastJSONObject(o) if err != nil { return nil, err @@ -176,7 +176,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { // and ensures it matches the supplied field description. // It will do any minor parsing, like dates, and return // the typed value again as an interface. -func validateFieldSchema(val any, field SchemaFieldDescription) (NormalValue, error) { +func validateFieldSchema(val any, field FieldDefinition) (NormalValue, error) { if field.Kind.IsNillable() { if val == nil { return NewNormalNil(field.Kind) @@ -187,7 +187,7 @@ func validateFieldSchema(val any, field SchemaFieldDescription) (NormalValue, er } if field.Kind.IsObjectArray() { - return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) + return nil, NewErrFieldNotExist(field.Name) } if field.Kind.IsObject() { @@ -588,15 +588,15 @@ func (doc *Document) setWithFastJSONObject(obj *fastjson.Object) error { // Set the value of a field. 
func (doc *Document) Set(field string, value any) error { - fd, exists := doc.schemaDescription.GetFieldByName(field) + fd, exists := doc.collectionDefinition.GetFieldByName(field) if !exists { return NewErrFieldNotExist(field) } - if fd.IsRelation() && !fd.Kind.IsObjectArray() { + if fd.Kind.IsObject() && !fd.Kind.IsObjectArray() { if !strings.HasSuffix(field, request.RelatedObjectID) { field = field + request.RelatedObjectID } - fd, exists = doc.schemaDescription.GetFieldByName(field) + fd, exists = doc.collectionDefinition.GetFieldByName(field) if !exists { return NewErrFieldNotExist(field) } diff --git a/client/document_test.go b/client/document_test.go index 593876705f..a70e868e0e 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -16,6 +16,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/immutable" + ccid "github.com/sourcenetwork/defradb/core/cid" ) @@ -27,8 +29,22 @@ var ( pref = ccid.NewDefaultSHA256PrefixV1() - schemaDescriptions = []SchemaDescription{ - { + def = CollectionDefinition{ + Description: CollectionDescription{ + Name: immutable.Some("User"), + Fields: []CollectionFieldDescription{ + { + Name: "Name", + }, + { + Name: "Age", + }, + { + Name: "Custom", + }, + }, + }, + Schema: SchemaDescription{ Name: "User", Fields: []SchemaFieldDescription{ { @@ -52,7 +68,7 @@ var ( ) func TestNewFromJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) + doc, err := NewDocFromJSON(testJSONObj, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -90,7 +106,7 @@ func TestNewFromJSON(t *testing.T) { } func TestSetWithJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) + doc, err := NewDocFromJSON(testJSONObj, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -137,7 +153,7 @@ func TestSetWithJSON(t *testing.T) { } func TestNewDocsFromJSON_WithObjectInsteadOfArray_Error(t *testing.T) { - _, err := NewDocsFromJSON(testJSONObj, schemaDescriptions[0]) + _, err := NewDocsFromJSON(testJSONObj, def) require.ErrorContains(t, err, "value doesn't contain array; it contains object") } @@ -147,7 +163,7 @@ func TestNewFromJSON_WithValidJSONFieldValue_NoError(t *testing.T) { "Age": 26, "Custom": "{\"tree\":\"maple\", \"age\": 260}" }`) - doc, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + doc, err := NewDocFromJSON(objWithJSONField, def) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -177,7 +193,7 @@ func TestNewFromJSON_WithInvalidJSONFieldValue_Error(t *testing.T) { "Age": 26, "Custom": "{\"tree\":\"maple, \"age\": 260}" }`) - _, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + _, err := NewDocFromJSON(objWithJSONField, def) require.ErrorContains(t, err, "invalid JSON payload. Payload: {\"tree\":\"maple, \"age\": 260}") } @@ -187,6 +203,6 @@ func TestNewFromJSON_WithInvalidJSONFieldValueSimpleString_Error(t *testing.T) { "Age": 26, "Custom": "blah" }`) - _, err := NewDocFromJSON(objWithJSONField, schemaDescriptions[0]) + _, err := NewDocFromJSON(objWithJSONField, def) require.ErrorContains(t, err, "invalid JSON payload. 
Payload: blah") } diff --git a/client/errors.go b/client/errors.go index dbc29ed78b..460392a030 100644 --- a/client/errors.go +++ b/client/errors.go @@ -22,9 +22,7 @@ const ( errParsingFailed string = "failed to parse argument" errUninitializeProperty string = "invalid state, required property is uninitialized" errMaxTxnRetries string = "reached maximum transaction reties" - errRelationOneSided string = "relation must be defined on both schemas" errCollectionNotFound string = "collection not found" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" errUnknownCRDT string = "unknown crdt" errCRDTKindMismatch string = "CRDT type %s can't be assigned to field kind %s" errInvalidCRDTType string = "CRDT type not supported" @@ -34,7 +32,6 @@ const ( errCanNotNormalizeValue string = "can not normalize value" errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" - errPrimarySideNotDefined string = "primary side of relation not defined" ) // Errors returnable from this package. @@ -59,7 +56,6 @@ var ( ErrCanNotNormalizeValue = errors.New(errCanNotNormalizeValue) ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) - ErrPrimarySideNotDefined = errors.New(errPrimarySideNotDefined) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. @@ -132,14 +128,6 @@ func NewErrMaxTxnRetries(inner error) error { return errors.Wrap(errMaxTxnRetries, inner) } -func NewErrRelationOneSided(fieldName string, typeName string) error { - return errors.New( - errRelationOneSided, - errors.NewKV("Field", fieldName), - errors.NewKV("Type", typeName), - ) -} - func NewErrCollectionNotFoundForSchemaVersion(schemaVersionID string) error { return errors.New( errCollectionNotFound, @@ -161,11 +149,6 @@ func NewErrUnknownCRDT(cType CType) error { ) } -// NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. -func NewErrFieldOrAliasToFieldNotExist(name string) error { - return errors.New(errFieldOrAliasToFieldNotExist, errors.NewKV("Name", name)) -} - func NewErrInvalidCRDTType(name, crdtType string) error { return errors.New( errInvalidCRDTType, @@ -181,10 +164,3 @@ func NewErrCRDTKindMismatch(cType, kind string) error { func NewErrInvalidJSONPaylaod(payload string) error { return errors.New(errInvalidJSONPayload, errors.NewKV("Payload", payload)) } - -func NewErrPrimarySideNotDefined(relationName string) error { - return errors.New( - errPrimarySideNotDefined, - errors.NewKV("RelationName", relationName), - ) -} diff --git a/client/schema_description.go b/client/schema_description.go index 302fadf5e7..2d34b131b8 100644 --- a/client/schema_description.go +++ b/client/schema_description.go @@ -31,7 +31,14 @@ type SchemaDescription struct { // It is immutable. Name string - // Fields contains the fields within this Schema. + // Fields contains the fields globally defined across the node network within this Schema. + // + // Any [CollectionDescription]s that reference this [SchemaDescription] will have a field + // set that contains all of these fields, plus any local only fields (such as the secondary side + // of a relation). 
+ // + // Embedded objects (including within Views) are schema-only, and as such fields of embedded + // objects will not have a corresponding [CollectionFieldDescription]. // // Currently new fields may be added after initial declaration, but they cannot be removed. Fields []SchemaFieldDescription diff --git a/client/schema_field_description.go b/client/schema_field_description.go index 7f945e3ab8..f317ace116 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -53,17 +53,10 @@ type SchemaFieldDescription struct { // Must contain a valid value. It is currently immutable. Kind FieldKind - // RelationName the name of the relationship that this field represents if this field is - // a relation field. Otherwise this will be empty. - RelationName string - // The CRDT Type of this field. If no type has been provided it will default to [LWW_REGISTER]. // // It is currently immutable. Typ CType - - // If true, this is the primary half of a relation, otherwise is false. - IsPrimaryRelation bool } // ScalarKind represents singular scalar field kinds, such as `Int`. @@ -278,16 +271,14 @@ var FieldKindStringToEnumMapping = map[string]FieldKind{ // IsRelation returns true if this field is a relation. func (f SchemaFieldDescription) IsRelation() bool { - return f.RelationName != "" + return f.Kind.IsObject() } // schemaFieldDescription is a private type used to facilitate the unmarshalling // of json to a [SchemaFieldDescription]. type schemaFieldDescription struct { - Name string - RelationName string - Typ CType - IsPrimaryRelation bool + Name string + Typ CType // Properties below this line are unmarshalled using custom logic in [UnmarshalJSON] Kind json.RawMessage @@ -301,53 +292,55 @@ func (f *SchemaFieldDescription) UnmarshalJSON(bytes []byte) error { } f.Name = descMap.Name - f.RelationName = descMap.RelationName f.Typ = descMap.Typ - f.IsPrimaryRelation = descMap.IsPrimaryRelation + f.Kind, err = parseFieldKind(descMap.Kind) + if err != nil { + return err + } + + return nil +} - if len(descMap.Kind) == 0 { - f.Kind = FieldKind_None - return nil +func parseFieldKind(bytes json.RawMessage) (FieldKind, error) { + if len(bytes) == 0 { + return FieldKind_None, nil } - if descMap.Kind[0] != '"' { + if bytes[0] != '"' { // If the Kind is not represented by a string, assume try to parse it to an int, as // that is the only other type we support. var intKind uint8 - err := json.Unmarshal(descMap.Kind, &intKind) + err := json.Unmarshal(bytes, &intKind) if err != nil { - return err + return nil, err } switch intKind { case uint8(FieldKind_BOOL_ARRAY), uint8(FieldKind_INT_ARRAY), uint8(FieldKind_FLOAT_ARRAY), uint8(FieldKind_STRING_ARRAY), uint8(FieldKind_NILLABLE_BOOL_ARRAY), uint8(FieldKind_NILLABLE_INT_ARRAY), uint8(FieldKind_NILLABLE_FLOAT_ARRAY), uint8(FieldKind_NILLABLE_STRING_ARRAY): - f.Kind = ScalarArrayKind(intKind) + return ScalarArrayKind(intKind), nil default: - f.Kind = ScalarKind(intKind) - } - } else { - var strKind string - err := json.Unmarshal(descMap.Kind, &strKind) - if err != nil { - return err + return ScalarKind(intKind), nil } + } - kind, ok := FieldKindStringToEnumMapping[strKind] - if ok { - f.Kind = kind - } else { - // If we don't find the string representation of this type in the - // scalar mapping, assume it is an object - if it is not, validation - // will catch this later. If it is unknown we have no way of telling - // as to whether the user thought it was a scalar or an object anyway. 
- if strKind[0] == '[' { - f.Kind = ObjectArrayKind(strings.Trim(strKind, "[]")) - } else { - f.Kind = ObjectKind(strKind) - } - } + var strKind string + err := json.Unmarshal(bytes, &strKind) + if err != nil { + return nil, err } - return nil + kind, ok := FieldKindStringToEnumMapping[strKind] + if ok { + return kind, nil + } + + // If we don't find the string representation of this type in the + // scalar mapping, assume it is an object - if it is not, validation + // will catch this later. If it is unknown we have no way of telling + // as to whether the user thought it was a scalar or an object anyway. + if strKind[0] == '[' { + return ObjectArrayKind(strings.Trim(strKind, "[]")), nil + } + return ObjectKind(strKind), nil } diff --git a/db/backup.go b/db/backup.go index cc2f732d7e..1353376f34 100644 --- a/db/backup.go +++ b/db/backup.go @@ -84,7 +84,7 @@ func (db *db) basicImport(ctx context.Context, filepath string) (err error) { delete(docMap, request.DocIDFieldName) delete(docMap, request.NewDocIDFieldName) - doc, err := client.NewDocFromMap(docMap, col.Schema()) + doc, err := client.NewDocFromMap(docMap, col.Definition()) if err != nil { return NewErrDocFromMap(err) } @@ -257,7 +257,7 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err refFieldName = field.Name + request.RelatedObjectID } - newForeignDoc, err := client.NewDocFromMap(oldForeignDoc, foreignCol.Schema()) + newForeignDoc, err := client.NewDocFromMap(oldForeignDoc, foreignCol.Definition()) if err != nil { return err } @@ -288,7 +288,7 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err delete(docM, refFieldName) } - newDoc, err := client.NewDocFromMap(docM, col.Schema()) + newDoc, err := client.NewDocFromMap(docM, col.Definition()) if err != nil { return err } diff --git a/db/backup_test.go b/db/backup_test.go index ec9bc947b5..486080db81 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -41,10 +41,10 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -56,7 +56,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -106,10 +106,10 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) 
require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -121,7 +121,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -171,10 +171,10 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -186,7 +186,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -237,10 +237,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -252,10 +252,10 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Book") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Definition()) require.NoError(t, err) - doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) + doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -313,10 +313,10 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Definition()) require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 
40}`), col1.Definition()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -328,7 +328,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Definition()) require.NoError(t, err) err = col2.Create(ctx, doc3) diff --git a/db/collection.go b/db/collection.go index 7b2305364a..e84530d3e7 100644 --- a/db/collection.go +++ b/db/collection.go @@ -85,6 +85,7 @@ func (c *collection) newFetcher() fetcher.Fetcher { func (db *db) createCollection( ctx context.Context, def client.CollectionDefinition, + newDefinitions []client.CollectionDefinition, ) (client.Collection, error) { schema := def.Schema desc := def.Description @@ -100,6 +101,36 @@ func (db *db) createCollection( } } + existingDefinitions, err := db.getAllActiveDefinitions(ctx) + if err != nil { + return nil, err + } + + schemaByName := map[string]client.SchemaDescription{} + for _, existingDefinition := range existingDefinitions { + schemaByName[existingDefinition.Schema.Name] = existingDefinition.Schema + } + for _, newDefinition := range newDefinitions { + schemaByName[newDefinition.Schema.Name] = newDefinition.Schema + } + + _, err = validateUpdateSchemaFields(schemaByName, client.SchemaDescription{}, schema) + if err != nil { + return nil, err + } + + definitionsByName := map[string]client.CollectionDefinition{} + for _, existingDefinition := range existingDefinitions { + definitionsByName[existingDefinition.GetName()] = existingDefinition + } + for _, newDefinition := range newDefinitions { + definitionsByName[newDefinition.GetName()] = newDefinition + } + err = db.validateNewCollection(def, definitionsByName) + if err != nil { + return nil, err + } + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) if err != nil { return nil, err @@ -122,9 +153,9 @@ func (db *db) createCollection( return nil, err } desc.SchemaVersionID = schema.VersionID - for _, globalField := range schema.Fields { + for _, localField := range desc.Fields { var fieldID uint64 - if globalField.Name == request.DocIDFieldName { + if localField.Name == request.DocIDFieldName { // There is no hard technical requirement for this, we just think it looks nicer // if the doc id is at the zero index. It makes it look a little nicer in commit // queries too. 
@@ -136,13 +167,12 @@ func (db *db) createCollection( } } - desc.Fields = append( - desc.Fields, - client.CollectionFieldDescription{ - Name: globalField.Name, - ID: client.FieldID(fieldID), - }, - ) + for i := range desc.Fields { + if desc.Fields[i].Name == localField.Name { + desc.Fields[i].ID = client.FieldID(fieldID) + break + } + } } desc, err = description.SaveCollection(ctx, txn, desc) @@ -226,9 +256,8 @@ func (db *db) updateSchema( idFieldName := field.Name + "_id" if _, ok := schema.GetFieldByName(idFieldName); !ok { schema.Fields = append(schema.Fields, client.SchemaFieldDescription{ - Name: idFieldName, - Kind: client.FieldKind_DocID, - RelationName: field.RelationName, + Name: idFieldName, + Kind: client.FieldKind_DocID, }) } } @@ -459,22 +488,12 @@ func validateUpdateSchemaFields( hasChanged = hasChanged || !fieldAlreadyExists if !fieldAlreadyExists && proposedField.Kind.IsObject() { - relatedDesc, relatedDescFound := descriptionsByName[proposedField.Kind.Underlying()] + _, relatedDescFound := descriptionsByName[proposedField.Kind.Underlying()] if !relatedDescFound { return false, NewErrFieldKindNotFound(proposedField.Name, proposedField.Kind.Underlying()) } - if proposedField.RelationName == "" { - return false, NewErrRelationalFieldMissingRelationName(proposedField.Name) - } - - if proposedField.IsPrimaryRelation { - if proposedField.Kind.IsObjectArray() { - return false, NewErrPrimarySideOnMany(proposedField.Name) - } - } - if proposedField.Kind.IsObject() && !proposedField.Kind.IsArray() { idFieldName := proposedField.Name + request.RelatedObjectID idField, idFieldFound := proposedDesc.GetFieldByName(idFieldName) @@ -482,36 +501,12 @@ func validateUpdateSchemaFields( if idField.Kind != client.FieldKind_DocID { return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) } - - if idField.RelationName == "" { - return false, NewErrRelationalFieldMissingRelationName(idField.Name) - } } } + } - var relatedFieldFound bool - var relatedField client.SchemaFieldDescription - for _, field := range relatedDesc.Fields { - if field.RelationName == proposedField.RelationName && - field.Kind != client.FieldKind_DocID && - !(relatedDesc.Name == proposedDesc.Name && field.Name == proposedField.Name) { - relatedFieldFound = true - relatedField = field - break - } - } - - if !relatedFieldFound { - return false, client.NewErrRelationOneSided(proposedField.Name, proposedField.Kind.Underlying()) - } - - if !(proposedField.IsPrimaryRelation || relatedField.IsPrimaryRelation) { - return false, client.NewErrPrimarySideNotDefined(proposedField.RelationName) - } - - if proposedField.IsPrimaryRelation && relatedField.IsPrimaryRelation { - return false, NewErrBothSidesPrimary(proposedField.RelationName) - } + if proposedField.Kind.IsObjectArray() { + return false, NewErrSecondaryFieldOnSchema(proposedField.Name) } if _, isDuplicate := newFieldNames[proposedField.Name]; isDuplicate { @@ -671,6 +666,153 @@ func (db *db) validateCollectionChanges( return nil } +var newCollectionValidators = []func( + client.CollectionDefinition, + map[string]client.CollectionDefinition, +) error{ + validateSecondaryFieldsPairUp, + validateRelationPointsToValidKind, + validateSingleSidePrimary, +} + +func (db *db) validateNewCollection( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, validators := range newCollectionValidators { + err := validators(def, defsByName) + if err != nil { + return err + } + } + + return nil +} + 
+func validateRelationPointsToValidKind( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + underlying := field.Kind.Value().Underlying() + _, ok := defsByName[underlying] + if !ok { + return NewErrFieldKindNotFound(field.Name, underlying) + } + } + + return nil +} + +func validateSecondaryFieldsPairUp( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + if !field.RelationName.HasValue() { + continue + } + + _, hasSchemaField := def.Schema.GetFieldByName(field.Name) + if hasSchemaField { + continue + } + + underlying := field.Kind.Value().Underlying() + otherDef, ok := defsByName[underlying] + if !ok { + continue + } + + if len(otherDef.Description.Fields) == 0 { + // Views/embedded objects do not require both sides of the relation to be defined. + continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName.Value(), + def.GetName(), + field.Name, + ) + if !ok { + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } + + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if !ok { + // This secondary is paired with another secondary, which is invalid + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } + } + + return nil +} + +func validateSingleSidePrimary( + def client.CollectionDefinition, + defsByName map[string]client.CollectionDefinition, +) error { + for _, field := range def.Description.Fields { + if !field.Kind.HasValue() { + continue + } + + if !field.Kind.Value().IsObject() { + continue + } + + if !field.RelationName.HasValue() { + continue + } + + _, hasSchemaField := def.Schema.GetFieldByName(field.Name) + if !hasSchemaField { + // This is a secondary field and thus passes this rule + continue + } + + underlying := field.Kind.Value().Underlying() + otherDef, ok := defsByName[underlying] + if !ok { + continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName.Value(), + def.GetName(), + field.Name, + ) + if !ok { + // This must be a one-sided relation, in which case it passes this rule + continue + } + + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if ok { + // This primary is paired with another primary, which is invalid + return ErrMultipleRelationPrimaries + } + } + + return nil +} + func validateCollectionNameUnique( oldColsByID map[uint32]client.CollectionDescription, newColsByID map[uint32]client.CollectionDescription, @@ -1705,14 +1847,14 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( if err != nil { return err } - otherSchema := otherCol.Schema() otherObjFieldDescription, _ := otherCol.Description().GetFieldByRelation( fieldDescription.RelationName, c.Name().Value(), objFieldDescription.Name, - &otherSchema, ) - if !(otherObjFieldDescription.Kind.IsObject() && !otherObjFieldDescription.Kind.IsArray()) { + if !(otherObjFieldDescription.Kind.HasValue() && + otherObjFieldDescription.Kind.Value().IsObject() && + !otherObjFieldDescription.Kind.Value().IsArray()) { // If the other field is not an object field then this is not a one to one relation and we can continue return nil } @@ -1886,7 +2028,7 @@ func (c *collection) 
getDataStoreKeyFromDocID(docID client.DocID) core.DataStore } func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { - fieldId, hasField := c.tryGetSchemaFieldID(fieldName) + fieldId, hasField := c.tryGetFieldID(fieldName) if !hasField { return core.DataStoreKey{}, false } @@ -1898,9 +2040,9 @@ func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldNa }, true } -// tryGetSchemaFieldID returns the FieldID of the given fieldName. +// tryGetFieldID returns the FieldID of the given fieldName. // Will return false if the field is not found. -func (c *collection) tryGetSchemaFieldID(fieldName string) (uint32, bool) { +func (c *collection) tryGetFieldID(fieldName string) (uint32, bool) { for _, field := range c.Definition().GetFields() { if field.Name == fieldName { if field.Kind.IsObject() || field.Kind.IsObjectArray() { diff --git a/db/collection_get.go b/db/collection_get.go index 7f7d82f0ed..75d3d2826b 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -94,7 +94,7 @@ func (c *collection) get( return nil, nil } - doc, err := fetcher.Decode(encodedDoc, c.Schema()) + doc, err := fetcher.Decode(encodedDoc, c.Definition()) if err != nil { return nil, err } diff --git a/db/collection_index.go b/db/collection_index.go index 159e40b4e9..2327ae027a 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -358,7 +358,7 @@ func (c *collection) iterateAllDocs( break } - doc, err := fetcher.Decode(encodedDoc, c.Schema()) + doc, err := fetcher.Decode(encodedDoc, c.Definition()) if err != nil { return errors.Join(err, df.Close()) } diff --git a/db/collection_update.go b/db/collection_update.go index c4d56de158..9110976593 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -262,7 +262,7 @@ func (c *collection) updateWithFilter( // Get the document, and apply the patch docAsMap := docMap.ToMap(selectionPlan.Value()) - doc, err := client.NewDocFromMap(docAsMap, c.Schema()) + doc, err := client.NewDocFromMap(docAsMap, c.Definition()) if err != nil { return nil, err } @@ -323,13 +323,11 @@ func (c *collection) patchPrimaryDoc( if err != nil { return err } - primarySchema := primaryCol.Schema() primaryField, ok := primaryCol.Description().GetFieldByRelation( relationFieldDescription.RelationName, secondaryCollectionName, relationFieldDescription.Name, - &primarySchema, ) if !ok { return client.NewErrFieldNotExist(relationFieldDescription.RelationName) @@ -355,7 +353,7 @@ func (c *collection) patchPrimaryDoc( return nil } - pc := c.db.newCollection(primaryCol.Description(), primarySchema) + pc := c.db.newCollection(primaryCol.Description(), primaryCol.Schema()) err = pc.validateOneToOneLinkDoesntAlreadyExist( ctx, primaryDocID.String(), diff --git a/db/errors.go b/db/errors.go index da82fcb941..f917ee9724 100644 --- a/db/errors.go +++ b/db/errors.go @@ -30,9 +30,6 @@ const ( errCannotSetVersionID string = "setting the VersionID is not supported" errRelationalFieldInvalidRelationType string = "invalid RelationType" errRelationalFieldMissingIDField string = "missing id field for relation object field" - errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideOnMany string = "cannot set the many side of a relation as primary" - errBothSidesPrimary string = "both sides of a relation cannot be primary" errRelatedFieldKindMismatch string = "invalid Kind of the related field" errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" 
errRelationalFieldIDInvalidType string = "relational id field of invalid kind" @@ -94,6 +91,8 @@ const ( errCollectionIDCannotBeZero string = "collection ID cannot be zero" errCollectionsCannotBeDeleted string = "collections cannot be deleted" errCanNotHavePolicyWithoutACP string = "can not specify policy on collection, without acp" + errSecondaryFieldOnSchema string = "secondary relation fields cannot be defined on the schema" + errRelationMissingField string = "relation missing field" ) var ( @@ -125,6 +124,9 @@ var ( ErrCollectionIDCannotBeZero = errors.New(errCollectionIDCannotBeZero) ErrCollectionsCannotBeDeleted = errors.New(errCollectionsCannotBeDeleted) ErrCanNotHavePolicyWithoutACP = errors.New(errCanNotHavePolicyWithoutACP) + ErrSecondaryFieldOnSchema = errors.New(errSecondaryFieldOnSchema) + ErrRelationMissingField = errors.New(errRelationMissingField) + ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -269,27 +271,6 @@ func NewErrRelationalFieldMissingIDField(name string, expectedName string) error ) } -func NewErrRelationalFieldMissingRelationName(name string) error { - return errors.New( - errRelationalFieldMissingRelationName, - errors.NewKV("Field", name), - ) -} - -func NewErrPrimarySideOnMany(name string) error { - return errors.New( - errPrimarySideOnMany, - errors.NewKV("Field", name), - ) -} - -func NewErrBothSidesPrimary(relationName string) error { - return errors.New( - errBothSidesPrimary, - errors.NewKV("RelationName", relationName), - ) -} - func NewErrRelatedFieldKindMismatch(relationName string, expected client.FieldKind, actual client.FieldKind) error { return errors.New( errRelatedFieldKindMismatch, @@ -628,3 +609,18 @@ func NewErrCollectionsCannotBeDeleted(colID uint32) error { errors.NewKV("CollectionID", colID), ) } + +func NewErrSecondaryFieldOnSchema(name string) error { + return errors.New( + errSecondaryFieldOnSchema, + errors.NewKV("Name", name), + ) +} + +func NewErrRelationMissingField(objectName, relationName string) error { + return errors.New( + errRelationMissingField, + errors.NewKV("Object", objectName), + errors.NewKV("RelationName", relationName), + ) +} diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index 889aea848a..cb4345abe1 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -106,13 +106,13 @@ func (encdoc *encodedDocument) Reset() { } // Decode returns a properly decoded document object -func Decode(encdoc EncodedDocument, sd client.SchemaDescription) (*client.Document, error) { +func Decode(encdoc EncodedDocument, collectionDefinition client.CollectionDefinition) (*client.Document, error) { docID, err := client.NewDocIDFromString(string(encdoc.ID())) if err != nil { return nil, err } - doc := client.NewDocWithID(docID, sd) + doc := client.NewDocWithID(docID, collectionDefinition) properties, err := encdoc.Properties(false) if err != nil { return nil, err diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 86746a9558..c3c1c6de7b 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -60,7 +60,7 @@ func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collectio data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) require.NoError(f.t, err) return doc } @@ -70,7 +70,7 @@ func (f 
*indexTestFixture) newProdDoc(id int, price float64, cat string, col cli data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) require.NoError(f.t, err) return doc } @@ -339,7 +339,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { }{Age: 21, Weight: 154.1}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data, f.users.Schema()) + doc, err := client.NewDocFromJSON(data, f.users.Definition()) require.NoError(f.t, err) err = f.users.Create(f.ctx, doc) @@ -549,7 +549,7 @@ func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1070,7 +1070,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1156,7 +1156,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1270,7 +1270,7 @@ func TestComposite_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) + doc, err := client.NewDocFromJSON(docJSON, f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1334,7 +1334,7 @@ func TestUniqueComposite_IfNilUpdateToValue_ShouldUpdateIndexStored(t *testing.T require.NoError(f.t, err) f.commitTxn() - doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Schema()) + doc, err := client.NewDocFromJSON([]byte(tc.Doc), f.users.Definition()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) diff --git a/db/schema.go b/db/schema.go index 6d52a92aee..756c02f1ff 100644 --- a/db/schema.go +++ b/db/schema.go @@ -51,7 +51,7 @@ func (db *db) addSchema( return nil, err } - col, err := db.createCollection(ctx, definition) + col, err := db.createCollection(ctx, definition, newDefinitions) if err != nil { return nil, err } diff --git a/db/view.go b/db/view.go index 5a778efd53..7cf040cbc5 100644 --- a/db/view.go +++ b/db/view.go @@ -80,7 +80,7 @@ func (db *db) addView( Schema: schema, } } else { - col, err := db.createCollection(ctx, definition) + col, err := db.createCollection(ctx, definition, newDefinitions) if err != nil { return nil, err } diff --git a/docs/data_format_changes/i2451-rel-field-props-local.md b/docs/data_format_changes/i2451-rel-field-props-local.md new file mode 100644 index 0000000000..ad34cc8965 --- /dev/null +++ b/docs/data_format_changes/i2451-rel-field-props-local.md @@ -0,0 +1,3 @@ +# Move relation field properties onto collection + +Field RelationName and secondary relation fields have been made local, and moved off of the schema and onto the collection. Field IsPrimary has been removed completely (from the schema). As a result, the schema root and schema version ID are no longer dependent on them.
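To make the new shape concrete, here is a minimal, illustrative sketch of the secondary (Book) side of a one-to-many `author_book` relation after this change. It is not part of the patch itself; it simply mirrors the `descriptions_test.go` expectations further down, and assumes the `github.com/sourcenetwork/defradb/client` and `github.com/sourcenetwork/immutable` import paths used throughout this repository. `RelationName` now lives only on the local `CollectionFieldDescription` entries, while the `SchemaFieldDescription` entries carry just a name, kind, and CRDT type:

```go
package main

import (
	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/immutable"
)

func main() {
	// Secondary side of an author_book relation: the object field and its
	// _id companion are described locally on the collection...
	def := client.CollectionDefinition{
		Description: client.CollectionDescription{
			Name: immutable.Some("Book"),
			Fields: []client.CollectionFieldDescription{
				{Name: "_docID"},
				{
					Name:         "author",
					Kind:         immutable.Some[client.FieldKind](client.ObjectKind("Author")),
					RelationName: immutable.Some("author_book"),
				},
				{
					Name:         "author_id",
					Kind:         immutable.Some[client.FieldKind](client.FieldKind_DocID),
					RelationName: immutable.Some("author_book"),
				},
				{Name: "name"},
			},
		},
		// ...while the schema no longer mentions RelationName or IsPrimary,
		// which is why the schema root and version ID no longer depend on them.
		Schema: client.SchemaDescription{
			Name: "Book",
			Fields: []client.SchemaFieldDescription{
				{Name: "_docID", Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT},
				{Name: "name", Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER},
			},
		},
	}
	_ = def
}
```

The primary (Author) side is then the one whose schema still declares the object field; the new `validateSingleSidePrimary` check in `db/collection.go` above relies on exactly this distinction.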
diff --git a/http/client_collection.go b/http/client_collection.go index d91601fbeb..a9f0681df7 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -398,7 +398,7 @@ func (c *Collection) Get( if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.def.Schema) + doc := client.NewDocWithID(docID, c.def) err = doc.SetWithJSON(data) if err != nil { return nil, err diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go index 2a2cc4f077..b89517b975 100644 --- a/http/handler_ccip_test.go +++ b/http/handler_ccip_test.go @@ -203,7 +203,7 @@ func setupDatabase(t *testing.T) client.DB { col, err := cdb.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/http/handler_collection.go b/http/handler_collection.go index 745a05740b..bbcbaab365 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -49,7 +49,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { switch { case client.IsJSONArray(data): - docList, err := client.NewDocsFromJSON(data, col.Schema()) + docList, err := client.NewDocsFromJSON(data, col.Definition()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -61,7 +61,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { } rw.WriteHeader(http.StatusOK) default: - doc, err := client.NewDocFromJSON(data, col.Schema()) + doc, err := client.NewDocFromJSON(data, col.Definition()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/net/client_test.go b/net/client_test.go index 89c26e06b5..bedd28437d 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -22,13 +22,23 @@ import ( "github.com/sourcenetwork/defradb/events" ) -var sd = client.SchemaDescription{ - Name: "test", - Fields: []client.SchemaFieldDescription{ - { - Name: "test", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, +var def = client.CollectionDefinition{ + Description: client.CollectionDescription{ + Fields: []client.CollectionFieldDescription{ + { + ID: 1, + Name: "test", + }, + }, + }, + Schema: client.SchemaDescription{ + Name: "test", + Fields: []client.SchemaFieldDescription{ + { + Name: "test", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, + }, }, }, } @@ -38,7 +48,7 @@ func TestPushlogWithDialFailure(t *testing.T) { _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), def) require.NoError(t, err) id, err := doc.GenerateDocID() require.NoError(t, err) @@ -67,7 +77,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), def) require.NoError(t, err) id, err := doc.GenerateDocID() require.NoError(t, err) @@ -110,7 +120,7 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { col, err := n1.db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Definition()) require.NoError(t, err) err = col.Save(ctx, 
doc) diff --git a/net/dag_test.go b/net/dag_test.go index ddd9e9aab3..2072122b2d 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -63,7 +63,7 @@ func TestSendJobWorker_WithNewJob_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -107,7 +107,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -168,7 +168,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { col, err := db1.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) diff --git a/net/peer_test.go b/net/peer_test.go index ba06b40447..2ad5db9037 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -168,7 +168,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -333,7 +333,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) cid, err := createCID(doc) @@ -357,7 +357,7 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) @@ -575,7 +575,7 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -940,7 +940,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -993,7 +993,7 @@ func 
TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -1023,7 +1023,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -1076,7 +1076,7 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -1120,7 +1120,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/net/server_test.go b/net/server_test.go index a1d2eeeb57..916e234109 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -133,7 +133,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -179,7 +179,7 @@ func TestNewServerWithEmitterError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -262,7 +262,7 @@ func TestPushLog(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) cid, err := createCID(doc) diff --git a/planner/create.go b/planner/create.go index 2918e9ee46..addf8e4d97 100644 --- a/planner/create.go +++ b/planner/create.go @@ -57,7 +57,7 @@ func (n *createNode) Kind() string { return "createNode" } func (n *createNode) Init() error { return nil } func (n *createNode) Start() error { - doc, err := client.NewDocFromMap(n.input, n.collection.Schema()) + doc, err := client.NewDocFromMap(n.input, n.collection.Definition()) if err != nil { n.err = err return err diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 17b7f86611..af3542c403 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -125,7 +125,7 @@ func toSelect( store, rootSelectType, collectionName, - 
definition.Schema, + definition, mapping, fields, ) @@ -774,7 +774,7 @@ func getCollectionName( } hostFieldDesc, parentHasField := parentCollection.Definition().GetFieldByName(selectRequest.Name) - if parentHasField && hostFieldDesc.RelationName != "" { + if parentHasField && hostFieldDesc.Kind.IsObject() { // If this field exists on the parent, and it is a child object // then this collection name is the collection name of the child. return hostFieldDesc.Kind.Underlying(), nil @@ -1079,7 +1079,7 @@ func resolveSecondaryRelationIDs( store client.Store, rootSelectType SelectionType, collectionName string, - schema client.SchemaDescription, + schema client.CollectionDefinition, mapping *core.DocumentMapping, requestables []Requestable, ) ([]Requestable, error) { diff --git a/planner/type_join.go b/planner/type_join.go index dd2fae8e77..be1ebb628e 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -80,7 +80,7 @@ func (p *Planner) makeTypeIndexJoin( var joinPlan planNode var err error - typeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + typeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } @@ -239,7 +239,7 @@ func (p *Planner) makeTypeJoinOne( } // get the correct sub field schema type (collection) - subTypeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + subTypeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } @@ -248,13 +248,11 @@ func (p *Planner) makeTypeJoinOne( if err != nil { return nil, err } - subTypeSchema := subTypeCol.Schema() subTypeField, subTypeFieldNameFound := subTypeCol.Description().GetFieldByRelation( subTypeFieldDesc.RelationName, parent.collection.Name().Value(), subTypeFieldDesc.Name, - &subTypeSchema, ) if !subTypeFieldNameFound { return nil, client.NewErrFieldNotExist(subTypeFieldDesc.RelationName) @@ -271,7 +269,7 @@ func (p *Planner) makeTypeJoinOne( dir := joinDirection{ firstNode: source, secondNode: selectPlan, - secondaryField: subTypeField.Name + request.RelatedObjectID, + secondaryField: immutable.Some(subTypeField.Name + request.RelatedObjectID), primaryField: subTypeFieldDesc.Name + request.RelatedObjectID, } @@ -281,7 +279,7 @@ func (p *Planner) makeTypeJoinOne( root: source, subType: selectPlan, subSelect: subType, - rootName: subTypeField.Name, + rootName: immutable.Some(subTypeField.Name), subTypeName: subType.Name, isSecondary: !subTypeFieldDesc.IsPrimaryRelation, secondaryFieldIndex: secondaryFieldIndex, @@ -374,7 +372,7 @@ func (p *Planner) makeTypeJoinMany( return nil, err } - subTypeFieldDesc, ok := parent.collection.Schema().GetFieldByName(subType.Name) + subTypeFieldDesc, ok := parent.collection.Definition().GetFieldByName(subType.Name) if !ok { return nil, client.NewErrFieldNotExist(subType.Name) } @@ -383,23 +381,25 @@ func (p *Planner) makeTypeJoinMany( if err != nil { return nil, err } - subTypeSchema := subTypeCol.Schema() - rootField, rootNameFound := subTypeCol.Description().GetFieldByRelation( - subTypeFieldDesc.RelationName, - parent.collection.Name().Value(), - subTypeFieldDesc.Name, - &subTypeSchema, - ) - - if !rootNameFound { - return nil, client.NewErrFieldNotExist(subTypeFieldDesc.RelationName) + var secondaryFieldName immutable.Option[string] + var rootName immutable.Option[string] + if subTypeFieldDesc.RelationName != "" { + rootField, rootNameFound := 
subTypeCol.Description().GetFieldByRelation( + subTypeFieldDesc.RelationName, + parent.collection.Name().Value(), + subTypeFieldDesc.Name, + ) + if rootNameFound { + rootName = immutable.Some(rootField.Name) + secondaryFieldName = immutable.Some(rootField.Name + request.RelatedObjectID) + } } dir := joinDirection{ firstNode: source, secondNode: selectPlan, - secondaryField: rootField.Name + request.RelatedObjectID, + secondaryField: secondaryFieldName, primaryField: subTypeFieldDesc.Name + request.RelatedObjectID, } @@ -409,7 +409,7 @@ func (p *Planner) makeTypeJoinMany( root: source, subType: selectPlan, subSelect: subType, - rootName: rootField.Name, + rootName: rootName, isSecondary: true, subTypeName: subType.Name, secondaryFetchLimit: 0, @@ -457,15 +457,19 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) { type joinDirection struct { firstNode planNode secondNode planNode - secondaryField string + secondaryField immutable.Option[string] primaryField string isInverted bool } func (dir *joinDirection) invert() { + if !dir.secondaryField.HasValue() { + // If the secondary field has no value it cannot be inverted + return + } dir.isInverted = !dir.isInverted dir.firstNode, dir.secondNode = dir.secondNode, dir.firstNode - dir.secondaryField, dir.primaryField = dir.primaryField, dir.secondaryField + dir.secondaryField, dir.primaryField = immutable.Some(dir.primaryField), dir.secondaryField.Value() } type invertibleTypeJoin struct { @@ -474,7 +478,7 @@ type invertibleTypeJoin struct { root planNode subType planNode - rootName string + rootName immutable.Option[string] subTypeName string subSelect *mapper.Select @@ -563,7 +567,9 @@ func (join *invertibleTypeJoin) Next() (bool, error) { if join.isSecondary { secondDocs, err := fetchDocsWithFieldValue( join.dir.secondNode, - join.dir.secondaryField, + // As the join is from the secondary field, we know that [join.dir.secondaryField] must have a value + // otherwise the user would not have been able to request it. 
+ join.dir.secondaryField.Value(), firstDoc.GetID(), join.secondaryFetchLimit, ) @@ -599,8 +605,12 @@ func (join *invertibleTypeJoin) invertJoinDirectionWithIndex( fieldFilter *mapper.Filter, index client.IndexDescription, ) error { + if !join.rootName.HasValue() { + // If the root field has no value it cannot be inverted + return nil + } subScan := getScanNode(join.subType) - subScan.tryAddField(join.rootName + request.RelatedObjectID) + subScan.tryAddField(join.rootName.Value() + request.RelatedObjectID) subScan.filter = fieldFilter subScan.initFetcher(immutable.Option[string]{}, immutable.Some(index)) diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index 00a4f6503c..937a6e2973 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -14,6 +14,7 @@ import ( "context" "fmt" "sort" + "strings" "github.com/sourcenetwork/graphql-go/language/ast" gqlp "github.com/sourcenetwork/graphql-go/language/parser" @@ -51,13 +52,13 @@ func fromAst(doc *ast.Document) ( []client.CollectionDefinition, error, ) { - relationManager := NewRelationManager() definitions := []client.CollectionDefinition{} + cTypeByFieldNameByObjName := map[string]map[string]client.CType{} for _, def := range doc.Definitions { switch defType := def.(type) { case *ast.ObjectDefinition: - description, err := collectionFromAstDefinition(relationManager, defType) + description, err := collectionFromAstDefinition(defType, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -65,7 +66,7 @@ func fromAst(doc *ast.Document) ( definitions = append(definitions, description) case *ast.InterfaceDefinition: - description, err := schemaFromAstDefinition(relationManager, defType) + description, err := schemaFromAstDefinition(defType, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -87,7 +88,7 @@ func fromAst(doc *ast.Document) ( // The details on the relations between objects depend on both sides // of the relationship. The relation manager handles this, and must be applied // after all the collections have been processed. - err := finalizeRelations(relationManager, definitions) + err := finalizeRelations(definitions, cTypeByFieldNameByObjName) if err != nil { return nil, err } @@ -97,27 +98,38 @@ func fromAst(doc *ast.Document) ( // collectionFromAstDefinition parses a AST object definition into a set of collection descriptions. func collectionFromAstDefinition( - relationManager *RelationManager, def *ast.ObjectDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, ) (client.CollectionDefinition, error) { - fieldDescriptions := []client.SchemaFieldDescription{ + schemaFieldDescriptions := []client.SchemaFieldDescription{ { Name: request.DocIDFieldName, Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, } + collectionFieldDescriptions := []client.CollectionFieldDescription{ + { + Name: request.DocIDFieldName, + }, + } policyDescription := immutable.None[client.PolicyDescription]() indexDescriptions := []client.IndexDescription{} for _, field := range def.Fields { - tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value) + tmpSchemaFieldDescriptions, tmpCollectionFieldDescriptions, err := fieldsFromAST( + field, + def.Name.Value, + cTypeByFieldNameByObjName, + false, + ) if err != nil { return client.CollectionDefinition{}, err } - fieldDescriptions = append(fieldDescriptions, tmpFieldsDescriptions...) 
+ schemaFieldDescriptions = append(schemaFieldDescriptions, tmpSchemaFieldDescriptions...) + collectionFieldDescriptions = append(collectionFieldDescriptions, tmpCollectionFieldDescriptions...) for _, directive := range field.Directives { if directive.Name.Value == types.IndexDirectiveLabel { @@ -131,14 +143,23 @@ func collectionFromAstDefinition( } // sort the fields lexicographically - sort.Slice(fieldDescriptions, func(i, j int) bool { + sort.Slice(schemaFieldDescriptions, func(i, j int) bool { // make sure that the _docID is always at the beginning - if fieldDescriptions[i].Name == request.DocIDFieldName { + if schemaFieldDescriptions[i].Name == request.DocIDFieldName { return true - } else if fieldDescriptions[j].Name == request.DocIDFieldName { + } else if schemaFieldDescriptions[j].Name == request.DocIDFieldName { return false } - return fieldDescriptions[i].Name < fieldDescriptions[j].Name + return schemaFieldDescriptions[i].Name < schemaFieldDescriptions[j].Name + }) + sort.Slice(collectionFieldDescriptions, func(i, j int) bool { + // make sure that the _docID is always at the beginning + if collectionFieldDescriptions[i].Name == request.DocIDFieldName { + return true + } else if collectionFieldDescriptions[j].Name == request.DocIDFieldName { + return false + } + return collectionFieldDescriptions[i].Name < collectionFieldDescriptions[j].Name }) for _, directive := range def.Directives { @@ -163,22 +184,24 @@ func collectionFromAstDefinition( Name: immutable.Some(def.Name.Value), Indexes: indexDescriptions, Policy: policyDescription, + Fields: collectionFieldDescriptions, }, Schema: client.SchemaDescription{ Name: def.Name.Value, - Fields: fieldDescriptions, + Fields: schemaFieldDescriptions, }, }, nil } func schemaFromAstDefinition( - relationManager *RelationManager, def *ast.InterfaceDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, ) (client.SchemaDescription, error) { fieldDescriptions := []client.SchemaFieldDescription{} for _, field := range def.Fields { - tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value) + // schema-only types do not have collection fields, so we can safely discard any returned here + tmpFieldsDescriptions, _, err := fieldsFromAST(field, def.Name.Value, cTypeByFieldNameByObjName, true) if err != nil { return client.SchemaDescription{}, err } @@ -322,75 +345,110 @@ func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) { return desc, nil } -func fieldsFromAST(field *ast.FieldDefinition, - relationManager *RelationManager, +func fieldsFromAST( + field *ast.FieldDefinition, hostObjectName string, -) ([]client.SchemaFieldDescription, error) { + cTypeByFieldNameByObjName map[string]map[string]client.CType, + schemaOnly bool, +) ([]client.SchemaFieldDescription, []client.CollectionFieldDescription, error) { kind, err := astTypeToKind(field.Type) if err != nil { - return nil, err + return nil, nil, err + } + + cType, err := setCRDTType(field, kind) + if err != nil { + return nil, nil, err } - schema := "" - relationName := "" - relationType := relationType(0) + hostMap := cTypeByFieldNameByObjName[hostObjectName] + if hostMap == nil { + hostMap = map[string]client.CType{} + cTypeByFieldNameByObjName[hostObjectName] = hostMap + } + hostMap[field.Name.Value] = cType - fieldDescriptions := []client.SchemaFieldDescription{} + schemaFieldDescriptions := []client.SchemaFieldDescription{} + collectionFieldDescriptions := []client.CollectionFieldDescription{} if kind.IsObject() { - if 
!kind.IsArray() { - schema = field.Type.(*ast.Named).Name.Value - relationType = relation_Type_ONE - if _, exists := findDirective(field, "primary"); exists { - relationType |= relation_Type_Primary + relationName, err := getRelationshipName(field, hostObjectName, kind.Underlying()) + if err != nil { + return nil, nil, err + } + + if kind.IsArray() { + if schemaOnly { // todo - document and/or do better + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + }, + ) + } else { + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + Kind: immutable.Some(kind), + RelationName: immutable.Some(relationName), + }, + ) } } else { - schema = field.Type.(*ast.List).Type.(*ast.Named).Name.Value - relationType = relation_Type_MANY - } + idFieldName := fmt.Sprintf("%s_id", field.Name.Value) + + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: idFieldName, + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some(relationName), + }, + ) - relationName, err = getRelationshipName(field, hostObjectName, schema) - if err != nil { - return nil, err - } + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + Kind: immutable.Some(kind), + RelationName: immutable.Some(relationName), + }, + ) - if !kind.IsArray() { - // An _id field is added for every 1-N relationship from this object. - fieldDescriptions = append(fieldDescriptions, client.SchemaFieldDescription{ - Name: fmt.Sprintf("%s_id", field.Name.Value), - Kind: client.FieldKind_DocID, - Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], - RelationName: relationName, - }) + if _, exists := findDirective(field, "primary"); exists { + // Only primary fields exist on the schema. If primary is automatically set + // (e.g. for one-many) a later step will add this property. + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + Typ: cType, + }, + ) + } } - - // Register the relationship so that the relationship manager can evaluate - // relationsip properties dependent on both collections in the relationship. 
- _, err := relationManager.RegisterSingle( - relationName, - schema, - field.Name.Value, - relationType, + } else { + schemaFieldDescriptions = append( + schemaFieldDescriptions, + client.SchemaFieldDescription{ + Name: field.Name.Value, + Kind: kind, + Typ: cType, + }, ) - if err != nil { - return nil, err - } - } - cType, err := setCRDTType(field, kind) - if err != nil { - return nil, err - } - - fieldDescription := client.SchemaFieldDescription{ - Name: field.Name.Value, - Kind: kind, - Typ: cType, - RelationName: relationName, + collectionFieldDescriptions = append( + collectionFieldDescriptions, + client.CollectionFieldDescription{ + Name: field.Name.Value, + }, + ) } - fieldDescriptions = append(fieldDescriptions, fieldDescription) - return fieldDescriptions, nil + return schemaFieldDescriptions, collectionFieldDescriptions, nil } // policyFromAST returns the policy description after parsing but the validation @@ -556,7 +614,23 @@ func getRelationshipName( return genRelationName(hostName, targetName) } -func finalizeRelations(relationManager *RelationManager, definitions []client.CollectionDefinition) error { +func genRelationName(t1, t2 string) (string, error) { + if t1 == "" || t2 == "" { + return "", client.NewErrUninitializeProperty("genRelationName", "relation types") + } + t1 = strings.ToLower(t1) + t2 = strings.ToLower(t2) + + if i := strings.Compare(t1, t2); i < 0 { + return fmt.Sprintf("%s_%s", t1, t2), nil + } + return fmt.Sprintf("%s_%s", t2, t1), nil +} + +func finalizeRelations( + definitions []client.CollectionDefinition, + cTypeByFieldNameByObjName map[string]map[string]client.CType, +) error { embeddedObjNames := map[string]struct{}{} for _, def := range definitions { if !def.Description.Name.HasValue() { @@ -564,35 +638,91 @@ func finalizeRelations(relationManager *RelationManager, definitions []client.Co } } - for _, definition := range definitions { - for i, field := range definition.Schema.Fields { - if field.RelationName == "" || field.Kind == client.FieldKind_DocID { + for i, definition := range definitions { + if _, ok := embeddedObjNames[definition.Description.Name.Value()]; ok { + // Embedded objects are simpler and require no addition work + continue + } + + for _, field := range definition.Description.Fields { + if !field.Kind.HasValue() || !field.Kind.Value().IsObject() || field.Kind.Value().IsArray() { + // We only need to process the primary side of a relation here, if the field is not a relation + // or if it is an array, we can skip it. continue } - rel, err := relationManager.GetRelation(field.RelationName) - if err != nil { - return err + var otherColDefinition immutable.Option[client.CollectionDefinition] + for _, otherDef := range definitions { + // Check the 'other' schema name, there can only be a one-one mapping in an SDL + // appart from embedded, which will be schema only. + if otherDef.Schema.Name == field.Kind.Value().Underlying() { + otherColDefinition = immutable.Some(otherDef) + break + } } - _, fieldRelationType, ok := rel.getField(field.Kind.Underlying(), field.Name) - if !ok { - return NewErrRelationMissingField(field.Kind.Underlying(), field.Name) + if !otherColDefinition.HasValue() { + // If the other collection is not found here we skip this field. Whilst this almost certainly means the SDL + // is invalid, validating anything beyond SDL syntax is not the responsibility of this package. 
+ continue + } + + var otherColFieldDescription immutable.Option[client.CollectionFieldDescription] + for _, otherField := range otherColDefinition.Value().Description.Fields { + if otherField.RelationName.Value() == field.RelationName.Value() { + otherColFieldDescription = immutable.Some(otherField) + break + } + } + + if !otherColFieldDescription.HasValue() || otherColFieldDescription.Value().Kind.Value().IsArray() { + // Relations only defined on one side of the object are possible, and so if this is one of them + // or if the other side is an array, we need to add the field to the schema (this is the primary side). + definition.Schema.Fields = append( + definition.Schema.Fields, + client.SchemaFieldDescription{ + Name: field.Name, + Kind: field.Kind.Value(), + Typ: cTypeByFieldNameByObjName[definition.Schema.Name][field.Name], + }, + ) } - // if not finalized then we are missing one side of the relationship - // unless this is an embedded object, which only have single-sided relations - _, shouldBeOneSidedRelation := embeddedObjNames[field.Kind.Underlying()] - if shouldBeOneSidedRelation && rel.finalized { - return NewErrViewRelationMustBeOneSided(field.Name, field.Kind.Underlying()) + otherIsEmbedded := len(otherColDefinition.Value().Description.Fields) == 0 + if !otherIsEmbedded { + var schemaFieldIndex int + var schemaFieldExists bool + for i, schemaField := range definition.Schema.Fields { + if schemaField.Name == field.Name { + schemaFieldIndex = i + schemaFieldExists = true + break + } + } + + if schemaFieldExists { + idFieldName := fmt.Sprintf("%s_id", field.Name) + + if _, idFieldExists := definition.Schema.GetFieldByName(idFieldName); !idFieldExists { + existingFields := definition.Schema.Fields + definition.Schema.Fields = make([]client.SchemaFieldDescription, len(definition.Schema.Fields)+1) + copy(definition.Schema.Fields, existingFields[:schemaFieldIndex+1]) + copy(definition.Schema.Fields[schemaFieldIndex+2:], existingFields[schemaFieldIndex+1:]) + + // An _id field is added for every 1-1 or 1-N relationship from this object if the relation + // does not point to an embedded object. + // + // It is inserted immediately after the object field to make things nicer for the user.
+ definition.Schema.Fields[schemaFieldIndex+1] = client.SchemaFieldDescription{ + Name: idFieldName, + Kind: client.FieldKind_DocID, + Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], + } + } + } } - field.IsPrimaryRelation = fieldRelationType.isSet(relation_Type_Primary) - definition.Schema.Fields[i] = field + definitions[i] = definition } } diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 354109965c..320bef158a 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -36,6 +36,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("User"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "verified", + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -85,6 +99,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("User"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "verified", + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -116,6 +144,20 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "name", + }, + { + Name: "publisher", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -165,131 +207,75 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "author", - RelationName: "author_book", - Kind: client.ObjectKind("Author"), - Typ: client.NONE_CRDT, + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), }, { - Name: "author_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), }, { Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, { Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, }, }, }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - }, Schema: client.SchemaDescription{ - Name: "Author", + Name: "Book", Fields: []client.SchemaFieldDescription{ { Name: "_docID", Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, { Name: "name", Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, { - Name: "published", - RelationName: "author_book", - Kind: client.ObjectKind("Book"), - Typ: client.NONE_CRDT, - IsPrimaryRelation: true, - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, + Name: "rating", + Kind: client.FieldKind_NILLABLE_FLOAT, Typ: 
client.LWW_REGISTER, }, }, }, }, - }, - }, - { - description: "Multiple simple types", - sdl: ` - type User { - name: String - age: Int - verified: Boolean - } - - type Author { - name: String - publisher: String - rating: Float - } - `, - targetDescs: []client.CollectionDefinition{ { Description: client.CollectionDescription{ - Name: immutable.Some("User"), + Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "User", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, }, { Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, }, { - Name: "verified", - Kind: client.FieldKind_NILLABLE_BOOL, - Typ: client.LWW_REGISTER, + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), }, }, }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - }, Schema: client.SchemaDescription{ Name: "Author", Fields: []client.SchemaFieldDescription{ @@ -298,19 +284,24 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, + { + Name: "age", + Kind: client.FieldKind_NILLABLE_INT, + Typ: client.LWW_REGISTER, + }, { Name: "name", Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, { - Name: "publisher", - Kind: client.FieldKind_NILLABLE_STRING, + Name: "published", + Kind: client.ObjectKind("Book"), Typ: client.LWW_REGISTER, }, { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, + Name: "published_id", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, }, @@ -338,25 +329,35 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ + Fields: []client.CollectionFieldDescription{ { Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, }, { Name: "author", - RelationName: "book_authors", - Kind: client.ObjectKind("Author"), - Typ: client.NONE_CRDT, + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("book_authors"), }, { - Name: "author_id", + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("book_authors"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, + }, + Schema: client.SchemaDescription{ + Name: "Book", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -375,6 +376,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("book_authors"), + 
}, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("book_authors"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -395,11 +417,9 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.LWW_REGISTER, }, { - Name: "published", - RelationName: "book_authors", - Kind: client.ObjectKind("Book"), - Typ: client.NONE_CRDT, - IsPrimaryRelation: true, + Name: "published", + Kind: client.ObjectKind("Book"), + Typ: client.LWW_REGISTER, }, { Name: "published_id", @@ -431,6 +451,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "author", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Book", @@ -441,11 +482,9 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.NONE_CRDT, }, { - Name: "author", - RelationName: "author_book", - Kind: client.ObjectKind("Author"), - Typ: client.NONE_CRDT, - IsPrimaryRelation: true, + Name: "author", + Kind: client.ObjectKind("Author"), + Typ: client.LWW_REGISTER, }, { Name: "author_id", @@ -469,6 +508,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "published_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -488,17 +548,6 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, - { - Name: "published", - RelationName: "author_book", - Kind: client.ObjectKind("Book"), - Typ: client.NONE_CRDT, - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, }, }, }, @@ -524,6 +573,27 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Book"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "author", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), + RelationName: immutable.Some("author_book"), + }, + { + Name: "author_id", + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("author_book"), + }, + { + Name: "name", + }, + { + Name: "rating", + }, + }, }, Schema: client.SchemaDescription{ Name: "Book", @@ -534,25 +604,23 @@ func TestSingleSimpleType(t *testing.T) { Typ: client.NONE_CRDT, }, { - Name: "author", - RelationName: "author_book", - Kind: client.ObjectKind("Author"), - Typ: client.NONE_CRDT, - IsPrimaryRelation: true, + Name: "name", + Kind: client.FieldKind_NILLABLE_STRING, + Typ: client.LWW_REGISTER, }, { - Name: "author_id", - Kind: client.FieldKind_DocID, 
+ Name: "rating", + Kind: client.FieldKind_NILLABLE_FLOAT, Typ: client.LWW_REGISTER, }, { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, + Name: "author", + Kind: client.ObjectKind("Author"), Typ: client.LWW_REGISTER, }, { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, + Name: "author_id", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, }, @@ -562,6 +630,22 @@ func TestSingleSimpleType(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Author"), Indexes: []client.IndexDescription{}, + Fields: []client.CollectionFieldDescription{ + { + Name: "_docID", + }, + { + Name: "age", + }, + { + Name: "name", + }, + { + Name: "published", + Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Book")), + RelationName: immutable.Some("author_book"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Author", @@ -581,12 +665,6 @@ func TestSingleSimpleType(t *testing.T) { Kind: client.FieldKind_NILLABLE_STRING, Typ: client.LWW_REGISTER, }, - { - Name: "published", - RelationName: "author_book", - Kind: client.ObjectArrayKind("Book"), - Typ: client.NONE_CRDT, - }, }, }, }, @@ -608,6 +686,7 @@ func runCreateDescriptionTest(t *testing.T, testcase descriptionTestCase) { for i, d := range descs { assert.Equal(t, testcase.targetDescs[i].Description, d.Description, testcase.description) + assert.Equal(t, testcase.targetDescs[i].Schema, d.Schema, testcase.description) } } diff --git a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index 01f6e9bc9b..304df792e6 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -30,7 +30,6 @@ const ( errPolicyUnknownArgument string = "policy with unknown argument" errPolicyInvalidIDProp string = "policy directive with invalid id property" errPolicyInvalidResourceProp string = "policy directive with invalid resource property" - errViewRelationMustBeOneSided string = "relations in views must only be defined on one schema" ) var ( @@ -50,14 +49,13 @@ var ( ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") // NonNull is the literal name of the GQL type, so we have to disable the linter //nolint:revive - ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) - ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) - ErrPolicyWithUnknownArg = errors.New(errPolicyUnknownArgument) - ErrPolicyInvalidIDProp = errors.New(errPolicyInvalidIDProp) - ErrPolicyInvalidResourceProp = errors.New(errPolicyInvalidResourceProp) - ErrViewRelationMustBeOneSided = errors.New(errViewRelationMustBeOneSided) + ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) + ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) + ErrPolicyWithUnknownArg = errors.New(errPolicyUnknownArgument) + ErrPolicyInvalidIDProp = errors.New(errPolicyInvalidIDProp) + ErrPolicyInvalidResourceProp = errors.New(errPolicyInvalidResourceProp) ) func NewErrDuplicateField(objectName, fieldName string) error { @@ -138,11 +136,3 @@ func NewErrRelationNotFound(relationName string) error { errors.NewKV("RelationName", relationName), ) } - -func NewErrViewRelationMustBeOneSided(fieldName string, typeName string) error { - return errors.New( - 
errViewRelationMustBeOneSided, - errors.NewKV("Field", fieldName), - errors.NewKV("Type", typeName), - ) -} diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index fc706041d8..6b7483be4f 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -414,7 +414,7 @@ func (g *Generator) buildTypes( // will be reassigned before the thunk is run // TODO remove when Go 1.22 collection := c - fieldDescriptions := collection.Schema.Fields + fieldDescriptions := collection.GetFields() isEmbeddedObject := !collection.Description.Name.HasValue() isQuerySource := len(collection.Description.QuerySources()) > 0 isViewObject := isEmbeddedObject || isQuerySource @@ -540,7 +540,6 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin // will be reassigned before the thunk is run // TODO remove when Go 1.22 collection := c - fieldDescriptions := collection.Schema.Fields mutationInputName := collection.Description.Name.Value() + "MutationInputArg" // check if mutation input type exists @@ -558,7 +557,7 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin mutationObjConf.Fields = (gql.InputObjectConfigFieldMapThunk)(func() (gql.InputObjectConfigFieldMap, error) { fields := make(gql.InputObjectConfigFieldMap) - for _, field := range fieldDescriptions { + for _, field := range collection.GetFields() { if strings.HasPrefix(field.Name, "_") { // ignore system defined args as the // user cannot override their values diff --git a/request/graphql/schema/relations.go b/request/graphql/schema/relations.go deleted file mode 100644 index 6d548ceebe..0000000000 --- a/request/graphql/schema/relations.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package schema - -import ( - "fmt" - "strings" - - "github.com/sourcenetwork/defradb/client" -) - -// relationType describes the type of relation between two types. -type relationType uint8 - -const ( - relation_Type_ONE relationType = 1 // 0b0000 0001 - relation_Type_MANY relationType = 2 // 0b0000 0010 - relation_Type_Primary relationType = 128 // 0b1000 0000 Primary reference entity on relation -) - -// IsSet returns true if the target relation type is set. -func (m relationType) isSet(target relationType) bool { - return m&target > 0 -} - -// RelationManager keeps track of all the relations that exist -// between schema types -type RelationManager struct { - relations map[string]*Relation -} - -func NewRelationManager() *RelationManager { - return &RelationManager{ - relations: make(map[string]*Relation), - } -} - -func (rm *RelationManager) GetRelation(name string) (*Relation, error) { - rel, ok := rm.relations[name] - if !ok { - return nil, NewErrRelationNotFound(name) - } - return rel, nil -} - -// RegisterSingle is used if you only know a single side of the relation -// at a time. It allows you to iteratively, across two calls, build the relation. -// If the relation exists and is finalized, then nothing is done. Returns true -// if nothing is done or the relation is successfully registered. 
-func (rm *RelationManager) RegisterSingle( - name string, - schemaType string, - schemaField string, - relType relationType, -) (bool, error) { - if name == "" { - return false, client.NewErrUninitializeProperty("RegisterSingle", "name") - } - - rel, ok := rm.relations[name] - if !ok { - // If a relation doesn't exist then make one. - rm.relations[name] = &Relation{ - name: name, - types: []relationType{relType}, - schemaTypes: []string{schemaType}, - fields: []string{schemaField}, - } - return true, nil - } - - if !rel.finalized { - // If a relation exists, and is not finalized, then finalizing it. - rel.types = append(rel.types, relType) - rel.schemaTypes = append(rel.schemaTypes, schemaType) - rel.fields = append(rel.fields, schemaField) - - if err := rel.finalize(); err != nil { - return false, err - } - rm.relations[name] = rel - } - - return true, nil -} - -type Relation struct { - name string - types []relationType - schemaTypes []string - fields []string - - // finalized indicates if we've properly - // updated both sides of the relation - finalized bool -} - -func (r *Relation) finalize() error { - // make sure all the types/fields are set - if len(r.types) != 2 || len(r.schemaTypes) != 2 || len(r.fields) != 2 { - return ErrRelationMissingTypes - } - - if isOne(r.types[0]) && isMany(r.types[1]) { - r.types[0] |= relation_Type_Primary // set primary on one - r.types[1] &^= relation_Type_Primary // clear primary on many - } else if isOne(r.types[1]) && isMany(r.types[0]) { - r.types[1] |= relation_Type_Primary // set primary on one - r.types[0] &^= relation_Type_Primary // clear primary on many - } else if isOne(r.types[1]) && isOne(r.types[0]) { - t1, t2 := r.types[0], r.types[1] - aBit := t1 & t2 - xBit := t1 ^ t2 - - // both types have primary set - if aBit.isSet(relation_Type_Primary) { - return ErrMultipleRelationPrimaries - } else if !xBit.isSet(relation_Type_Primary) { - return client.NewErrPrimarySideNotDefined(r.name) - } - } - - r.finalized = true - return nil -} - -func (r Relation) getField(schemaType string, field string) (string, relationType, bool) { - for i, f := range r.fields { - if f == field && r.schemaTypes[i] == schemaType { - return f, r.types[i], true - } - } - return "", relationType(0), false -} - -func genRelationName(t1, t2 string) (string, error) { - if t1 == "" || t2 == "" { - return "", client.NewErrUninitializeProperty("genRelationName", "relation types") - } - t1 = strings.ToLower(t1) - t2 = strings.ToLower(t2) - - if i := strings.Compare(t1, t2); i < 0 { - return fmt.Sprintf("%s_%s", t1, t2), nil - } - return fmt.Sprintf("%s_%s", t2, t1), nil -} - -// isOne returns true if the Relation_ONE bit is set -func isOne(fieldmeta relationType) bool { - return fieldmeta.isSet(relation_Type_ONE) -} - -// isMany returns true if the Relation_ONE bit is set -func isMany(fieldmeta relationType) bool { - return fieldmeta.isSet(relation_Type_MANY) -} diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index dac81d0ce2..186dbc0f3e 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -159,7 +159,7 @@ func BackfillBenchmarkDB( // create the documents docIDs := make([]client.DocID, numTypes) for j := 0; j < numTypes; j++ { - doc, err := client.NewDocFromJSON([]byte(docs[j]), cols[j].Schema()) + doc, err := client.NewDocFromJSON([]byte(docs[j]), cols[j].Definition()) if err != nil { errCh <- errors.Wrap("failed to create document from fixture", err) return diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go 
index 59ab0577ba..24c90b28b5 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -178,7 +178,7 @@ func runCollectionBenchCreateMany( docs := make([]*client.Document, opCount) for j := 0; j < opCount; j++ { d, _ := fixture.GenerateDocs() - docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Schema()) + docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Definition()) } collections[0].CreateMany(ctx, docs) //nolint:errcheck @@ -201,7 +201,7 @@ func runCollectionBenchCreateSync(b *testing.B, for j := 0; j < runs; j++ { docs, _ := fixture.GenerateDocs() for k := 0; k < numTypes; k++ { - doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Schema()) + doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Definition()) collections[k].Create(ctx, doc) //nolint:errcheck } } @@ -240,7 +240,7 @@ func runCollectionBenchCreateAsync(b *testing.B, docs, _ := fixture.GenerateDocs() // create the documents for j := 0; j < numTypes; j++ { - doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Schema()) + doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Definition()) collections[j].Create(ctx, doc) //nolint:errcheck } diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 4ff8692561..6a1764cc27 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -395,7 +395,7 @@ func (c *Collection) Get( if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.Schema()) + doc := client.NewDocWithID(docID, c.Definition()) err = doc.SetWithJSON(data) if err != nil { return nil, err diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index 9b4acc440d..487558e934 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -119,7 +119,7 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { } // getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. 
-func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.SchemaFieldDescription) string { +func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.FieldDefinition) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) return g.generatedDocs[field.Kind.Underlying()][ind].docID } @@ -134,12 +134,12 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { totalDemand := currentTypeDemand.getAverage() for i := 0; i < totalDemand; i++ { newDoc := make(map[string]any) - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Name == request.DocIDFieldName { continue } if field.IsRelation() { - if field.IsPrimaryRelation { + if field.IsPrimaryRelation && field.Kind.IsObject() { if strings.HasSuffix(field.Name, request.RelatedObjectID) { newDoc[field.Name] = g.getNextPrimaryDocID(typeName, &field) } else { @@ -151,7 +151,7 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { newDoc[field.Name] = g.generateRandomValue(typeName, field.Kind, fieldConf) } } - doc, err := client.NewDocFromMap(newDoc, typeDef.Schema) + doc, err := client.NewDocFromMap(newDoc, typeDef) if err != nil { return err } @@ -221,7 +221,7 @@ func validateDefinitions(definitions []client.CollectionDefinition) error { if def.Description.Name.Value() != def.Schema.Name { return NewErrIncompleteColDefinition("description name and schema name do not match") } - for _, field := range def.Schema.Fields { + for _, field := range def.GetFields() { if field.Name == "" { return NewErrIncompleteColDefinition("field name is empty") } diff --git a/tests/gen/gen_auto_config.go b/tests/gen/gen_auto_config.go index eab85dd318..b69dfb0b98 100644 --- a/tests/gen/gen_auto_config.go +++ b/tests/gen/gen_auto_config.go @@ -58,11 +58,12 @@ func validateConfig(types map[string]client.CollectionDefinition, configsMap con return newNotDefinedTypeErr(typeName) } for fieldName, fieldConfig := range typeConfigs { - fieldDef, hasField := typeDef.Schema.GetFieldByName(fieldName) + fieldDef, hasField := typeDef.GetFieldByName(fieldName) if !hasField { return NewErrInvalidConfiguration("field " + fieldName + " is not defined in the schema for type " + typeName) } + err := checkAndValidateMinMax(&fieldDef, &fieldConfig) if err != nil { return err @@ -82,7 +83,7 @@ func validateConfig(types map[string]client.CollectionDefinition, configsMap con return nil } -func checkAndValidateMinMax(field *client.SchemaFieldDescription, conf *genConfig) error { +func checkAndValidateMinMax(field *client.FieldDefinition, conf *genConfig) error { _, hasMin := conf.props["min"] if hasMin { var err error @@ -100,7 +101,7 @@ func checkAndValidateMinMax(field *client.SchemaFieldDescription, conf *genConfi return nil } -func checkAndValidateLen(field *client.SchemaFieldDescription, conf *genConfig) error { +func checkAndValidateLen(field *client.FieldDefinition, conf *genConfig) error { lenConf, hasLen := conf.props["len"] if hasLen { if field.Kind != client.FieldKind_NILLABLE_STRING { @@ -117,7 +118,7 @@ func checkAndValidateLen(field *client.SchemaFieldDescription, conf *genConfig) return nil } -func checkAndValidateRatio(field *client.SchemaFieldDescription, conf *genConfig) error { +func checkAndValidateRatio(field *client.FieldDefinition, conf *genConfig) error { ratioConf, hasRatio := conf.props["ratio"] if hasRatio { if field.Kind != client.FieldKind_NILLABLE_BOOL { diff --git 
a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index 7a17d74989..ec8c1ea881 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -65,7 +65,7 @@ func newTypeUsageCounter(random *rand.Rand) typeUsageCounters { // addRelationUsage adds a relation usage tracker for a foreign field. func (c *typeUsageCounters) addRelationUsage( secondaryType string, - field client.SchemaFieldDescription, + field client.FieldDefinition, minPerDoc, maxPerDoc, numDocs int, ) { primaryType := field.Kind.Underlying() @@ -81,7 +81,7 @@ func (c *typeUsageCounters) addRelationUsage( } // getNextTypeIndForField returns the next index to be used for a foreign field. -func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.SchemaFieldDescription) int { +func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDefinition) int { current := c.m[field.Kind.Underlying()][secondaryType][field.Name] return current.useNextDocIDIndex() } @@ -272,7 +272,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( primaryGraph map[string][]string, ) (typeDemand, error) { primaryTypeDef := g.types[primaryType] - for _, field := range primaryTypeDef.Schema.Fields { + for _, field := range primaryTypeDef.GetFields() { if field.Kind.IsObject() && field.Kind.Underlying() == secondaryType { primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max} minPerDoc, maxPerDoc := 1, 1 @@ -338,7 +338,7 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( primaryGraph map[string][]string, ) error { typeDef := g.types[typeName] - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Kind.IsObject() && !field.IsPrimaryRelation { primaryDocDemand := g.docsDemand[typeName] newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max} @@ -401,7 +401,7 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, minPerDoc, maxPerDoc int) { secondaryTypeDef := g.types[secondaryType] - for _, secondaryTypeField := range secondaryTypeDef.Schema.Fields { + for _, secondaryTypeField := range secondaryTypeDef.GetFields() { if secondaryTypeField.Kind.Underlying() == primaryType { g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, minPerDoc, maxPerDoc, g.docsDemand[primaryType].getAverage()) @@ -422,7 +422,7 @@ func getRelationGraph(types map[string]client.CollectionDefinition) map[string][ } for typeName, typeDef := range types { - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.Kind.IsObject() { if field.IsPrimaryRelation { primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Kind.Underlying()) diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 54212509e0..02cb45331b 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -1203,6 +1203,15 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. 
Description: client.CollectionDescription{ Name: immutable.Some("User"), ID: 0, + Fields: []client.CollectionFieldDescription{ + { + Name: "name", + }, + { + Name: "device", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("Device")), + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -1211,10 +1220,6 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Name: "name", Kind: client.FieldKind_NILLABLE_INT, }, - { - Name: "device", - Kind: client.ObjectKind("Device"), - }, }, }, }, @@ -1222,6 +1227,15 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Description: client.CollectionDescription{ Name: immutable.Some("Device"), ID: 1, + Fields: []client.CollectionFieldDescription{ + { + Name: "model", + }, + { + Name: "owner", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + }, + }, }, Schema: client.SchemaDescription{ Name: "Device", @@ -1231,9 +1245,8 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. Kind: client.FieldKind_NILLABLE_STRING, }, { - Name: "owner", - Kind: client.ObjectKind("User"), - IsPrimaryRelation: true, + Name: "owner", + Kind: client.ObjectKind("User"), }, }, }, @@ -1267,6 +1280,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. name: "field name is empty", changeDefs: func(defs []client.CollectionDefinition) { defs[0].Schema.Fields[0].Name = "" + defs[0].Description.Fields[0].Name = "" }, }, { @@ -1304,6 +1318,22 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("User"), ID: 0, + Fields: []client.CollectionFieldDescription{ + { + Name: "name", + }, + { + Name: "age", + }, + { + Name: "rating", + }, + { + Name: "devices", + Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Device")), + RelationName: immutable.Some("Device_owner"), + }, + }, }, Schema: client.SchemaDescription{ Name: "User", @@ -1320,11 +1350,6 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Name: "rating", Kind: client.FieldKind_NILLABLE_FLOAT, }, - { - Name: "devices", - Kind: client.ObjectArrayKind("Device"), - RelationName: "Device_owner", - }, }, }, }, @@ -1332,6 +1357,20 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Description: client.CollectionDescription{ Name: immutable.Some("Device"), ID: 1, + Fields: []client.CollectionFieldDescription{ + { + Name: "model", + }, + { + Name: "owner", + Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + RelationName: immutable.Some("Device_owner"), + }, + { + Name: "owner_id", + RelationName: immutable.Some("Device_owner"), + }, + }, }, Schema: client.SchemaDescription{ Name: "Device", @@ -1341,9 +1380,14 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Kind: client.FieldKind_NILLABLE_STRING, }, { - Name: "owner_id", - Kind: client.FieldKind_DocID, - RelationName: "Device_owner", + Name: "owner", + Kind: client.ObjectKind("User"), + Typ: client.LWW_REGISTER, + }, + { + Name: "owner_id", + Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, }, }, }, diff --git a/tests/integration/collection/update/simple/with_doc_id_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go index cea7117682..c5e402e5f4 100644 --- a/tests/integration/collection/update/simple/with_doc_id_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ 
-26,7 +26,7 @@ func TestUpdateWithDocID(t *testing.T) { "age": 21 }` - doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"].Schema) + doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"]) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/update/simple/with_doc_ids_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go index 2469eeee56..880ae3d603 100644 --- a/tests/integration/collection/update/simple/with_doc_ids_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -26,7 +26,7 @@ func TestUpdateWithDocIDs(t *testing.T) { "age": 21 }` - doc1, err := client.NewDocFromJSON([]byte(docStr1), colDefMap["Users"].Schema) + doc1, err := client.NewDocFromJSON([]byte(docStr1), colDefMap["Users"]) if err != nil { assert.Fail(t, err.Error()) } @@ -36,7 +36,7 @@ func TestUpdateWithDocIDs(t *testing.T) { "age": 32 }` - doc2, err := client.NewDocFromJSON([]byte(docStr2), colDefMap["Users"].Schema) + doc2, err := client.NewDocFromJSON([]byte(docStr2), colDefMap["Users"]) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index bbcfc5b8bc..ebe45e0b4f 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -74,7 +74,7 @@ func TestUpdateWithFilter(t *testing.T) { "age": 21 }` - doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"].Schema) + doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"]) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/utils.go b/tests/integration/collection/utils.go index b8bf1cf46b..497637a5c3 100644 --- a/tests/integration/collection/utils.go +++ b/tests/integration/collection/utils.go @@ -86,7 +86,7 @@ func setupDatabase( } for _, docStr := range docs { - doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Definition()) if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index ec5c174106..c3f88eea58 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -39,7 +39,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "Shahzad" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID2 := doc2.ID().String() diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index b02b2505e1..141965966f 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 723421f91b..d224690827 100644 --- a/tests/integration/events/simple/with_update_test.go +++ 
b/tests/integration/events/simple/with_update_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "John" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -39,7 +39,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "Shahzad" }`, ), - colDefMap["Users"].Schema, + colDefMap["Users"], ) assert.Nil(t, err) docID2 := doc2.ID().String() @@ -66,14 +66,14 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ExpectedUpdates: []testUtils.ExpectedUpdate{ { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste"), + Cid: immutable.Some("bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu"), }, { DocID: immutable.Some(docID2), }, { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeidpwcpixokptqamh7qvngbrm335mvrzs3skrlwdmkq6nmqesoj4sm"), + Cid: immutable.Some("bafybeifhmjw6ay5rvwznqh37ogcw5hrmqtxrnredoh6psn7lhgtdc253km"), }, }, } diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index d2bf418294..8b998d0051 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -149,7 +149,7 @@ func setupDatabase( require.NoError(t, err) for _, docStr := range docs { - doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Definition()) require.NoError(t, err) err = col.Save(ctx, doc) diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go index 3b700b132b..031b509950 100644 --- a/tests/integration/explain/default/type_join_many_test.go +++ b/tests/integration/explain/default/type_join_many_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -53,7 +55,7 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go index 8a7fac0925..3059bf8528 100644 --- a/tests/integration/explain/default/type_join_one_test.go +++ b/tests/integration/explain/default/type_join_one_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -54,7 +56,7 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -163,7 +165,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -196,7 +198,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "contact", + 
"rootName": immutable.Some("contact"), "subTypeName": "address", }, }, diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go index fd1676aed9..c09c1b0f12 100644 --- a/tests/integration/explain/default/type_join_test.go +++ b/tests/integration/explain/default/type_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -86,7 +88,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, @@ -144,7 +146,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, @@ -204,7 +206,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "direction": "primary", "joinType": "typeJoinOne", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "contact", }, }, diff --git a/tests/integration/explain/default/with_average_join_test.go b/tests/integration/explain/default/with_average_join_test.go index 265ca932ce..d1cd68046e 100644 --- a/tests/integration/explain/default/with_average_join_test.go +++ b/tests/integration/explain/default/with_average_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -96,7 +98,7 @@ func TestDefaultExplainRequestWithAverageOnJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -253,7 +255,7 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -299,7 +301,7 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/with_count_join_test.go b/tests/integration/explain/default/with_count_join_test.go index 3f7802820d..4833354bba 100644 --- a/tests/integration/explain/default/with_count_join_test.go +++ b/tests/integration/explain/default/with_count_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -66,7 +68,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": 
immutable.Some("author"), "subTypeName": "books", }, }, @@ -175,7 +177,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t * IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -217,7 +219,7 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t * IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/explain/default/with_sum_join_test.go b/tests/integration/explain/default/with_sum_join_test.go index 5117031959..0889b3bd85 100644 --- a/tests/integration/explain/default/with_sum_join_test.go +++ b/tests/integration/explain/default/with_sum_join_test.go @@ -13,6 +13,8 @@ package test_explain_default import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) @@ -70,7 +72,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -165,7 +167,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithFilter(t *testing IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, @@ -280,7 +282,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "books", }, }, @@ -322,7 +324,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedFieldWithManySources(t *te IncludeChildNodes: false, ExpectedAttributes: dataMap{ "joinType": "typeJoinMany", - "rootName": "author", + "rootName": immutable.Some("author"), "subTypeName": "articles", }, }, diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index b500ce1daf..943916f1ed 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", + "cid": "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index bee050d1ae..260a9a7b70 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -41,7 +41,7 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "name": "John", "age": 30 }` - doc1, err := client.NewDocFromJSON([]byte(jsonString1), colDefMap["Author"].Schema) + doc1, err := client.NewDocFromJSON([]byte(jsonString1), colDefMap["Author"]) require.NoError(t, err) jsonString2 := fmt.Sprintf(`{ @@ 
-49,7 +49,7 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "rating": 9.9, "author_id": "%s" }`, doc1.ID()) - doc2, err := client.NewDocFromJSON([]byte(jsonString2), colDefMap["Book"].Schema) + doc2, err := client.NewDocFromJSON([]byte(jsonString2), colDefMap["Book"]) require.NoError(t, err) jsonString3 := fmt.Sprintf(`{ diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 751ca67b78..d3df327de2 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -65,7 +65,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio }`, bookID, ), - ExpectedError: "The given field or alias to field does not exist. Name: published", + ExpectedError: "The given field does not exist. Name: published", }, }, } @@ -118,7 +118,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te }`, bookID, ), - ExpectedError: "The given field or alias to field does not exist. Name: published", + ExpectedError: "The given field does not exist. Name: published", }, }, } diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index 256db2f442..ef18668d20 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -141,7 +141,7 @@ func TestP2FullPReplicator(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{ "Name": "John", "Age": 21 - }`), colDefMap[userCollection].Schema) + }`), colDefMap[userCollection]) require.NoError(t, err) test := P2PTestCase{ diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index e744c4735d..2373037b62 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -140,7 +140,7 @@ func seedDocument( return client.DocID{}, err } - doc, err := client.NewDocFromJSON([]byte(document), col.Schema()) + doc, err := client.NewDocFromJSON([]byte(document), col.Definition()) if err != nil { return client.DocID{}, err } diff --git a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go index 8fd73fe06a..b5990a050f 100644 --- a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go +++ b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go @@ -76,7 +76,7 @@ func TestP2PSubscribeAddGetMultiple(t *testing.T) { }, testUtils.GetAllP2PCollections{ NodeID: 1, - ExpectedCollectionIDs: []int{2, 0}, + ExpectedCollectionIDs: []int{0, 2}, }, }, } diff --git a/tests/integration/net/state/simple/replicator/with_create_test.go b/tests/integration/net/state/simple/replicator/with_create_test.go index 9fee99880a..0d3dbad143 100644 --- a/tests/integration/net/state/simple/replicator/with_create_test.go +++ b/tests/integration/net/state/simple/replicator/with_create_test.go @@ -492,7 +492,7 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + "schemaVersionId": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", }, }, }, @@ -552,7 +552,7 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", 
"_version": []map[string]any{ { - "schemaVersionId": "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", + "schemaVersionId": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", }, }, }, diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index 7297f7fa4a..b90d5d0ea4 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiev2thtqxttuhr3aq5dvyb3aif4cey7werksskw5xuetmwxjxi7ty", + "cid": "bafybeihnalsemihsyycy3vaxbhq6iqrixmsk5k3idq52u76h2f5wkvobx4", }, { - "cid": "bafybeifos5iir63tmp3bdoj7zr5aand4ud2tf2qfnjlh6nvrzw3knkewuy", + "cid": "bafybeifxk5rhzuemqn2o35hh7346gydqlfmhkdzeguiqo5vczgyz4xz7rm", }, { - "cid": "bafybeicjb4x47xk6koh4uhgokhjr5zbg3bhbcfoa4um4vdktnbhrsx6d2a", + "cid": "bafybeig36zwhejk54nvvey5wsfbl7rzm7xscsyji5uqp6j4hw4zh7dhep4", }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -125,16 +125,16 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", - "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", - "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", - "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, }, @@ -349,7 +349,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { `, Results: []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "collectionID": int64(1), "delta": testUtils.CBORValue(22), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -358,13 +358,13 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { 
"height": int64(2), "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "_head", }, }, }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "collectionID": int64(1), "delta": testUtils.CBORValue(21), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -374,7 +374,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "collectionID": int64(1), "delta": testUtils.CBORValue("John"), "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -384,7 +384,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -393,17 +393,17 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "name": "_head", }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "name": "age", }, }, }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "collectionID": int64(1), "delta": nil, "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", @@ -412,11 +412,11 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index 30eab52b47..22f2caa5c2 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva" + cid: "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva" + cid: "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": 
"bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index e31a18c9c7..3475985174 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { // "Age" field head - "cid": "bafybeid44afmsi6hh6yasgcjncnlvdpqsu2durizsxhdmhsbrqekypf6aa", + "cid": "bafybeifwa5vgfvnrdwzqmojsxilwbg2k37axh2fs57zfmddz3l5yivn4la", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieepmzk3s5dzxztfq5zi5e5g3mnb6yfumx3euknnbur4x3a5neidq", + "cid": "bafybeicacj5fmr267b6kkmv4ck3g5cm5odca7hu7ajwagfttpspbsu7n5u", }, { - "cid": "bafybeifcxdrzqfj54w5mls7mf6nhxtjnweoevves7rwzsda6gmvzqc4t7y", + "cid": "bafybeiexu7xpwhyo2azo2ap2nbny5d4chhr725xrhmxnt5ebabucyjlfqu", }, { - "cid": "bafybeihavavtkfgaevtnzbabdwmgpamgbpkonw4ardalsamcitspqaxhs4", + "cid": "bafybeibbp6jn7y2t6jakbdtvboruieo3iobyuumppbwbw7rwkmz4tdh5yq", }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": 
"bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go index abaaa4b434..9f61805048 100644 --- a/tests/integration/query/commits/with_doc_id_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -104,14 +104,14 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Request: ` { commits( docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4" + cid: "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go index da28665990..89ba666163 100644 --- a/tests/integration/query/commits/with_doc_id_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "_count": 0, }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "_count": 0, }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_doc_id_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go index 790fa672a1..65fb4a5637 100644 --- a/tests/integration/query/commits/with_doc_id_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -118,7 +118,7 @@ func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, }, }, @@ -150,7 +150,7 @@ func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go index 84be4f5682..47b21aaf08 100644 --- a/tests/integration/query/commits/with_doc_id_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -57,10 +57,10 @@ func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go index a84344a402..938ce72ea9 
100644 --- a/tests/integration/query/commits/with_doc_id_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -50,10 +50,10 @@ func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 6ccf0cca44..058825acca 100644 --- a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -58,11 +58,11 @@ func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go index 02f4426958..70ab643688 100644 --- a/tests/integration/query/commits/with_doc_id_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -44,23 +44,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, }, @@ -99,23 +99,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", 
"height": int64(2), }, }, @@ -154,24 +154,24 @@ func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", - "height": int64(1), + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", + "height": int64(2), }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", - "height": int64(2), + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "height": int64(1), }, }, }, @@ -209,23 +209,23 @@ func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", - "height": int64(2), + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "height": int64(1), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", - "height": int64(1), + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", + "height": int64(2), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, }, @@ -278,39 +278,39 @@ func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeiewgawahat7sxdoafvu77uvsaaj2ttatqllj2qvnqhornzxl2gteq", + "cid": "bafybeigzaxekosbmrfrzjhkztodipzmz3voiqnia275347b6vkq5keouf4", "height": int64(3), }, { - "cid": "bafybeid44afmsi6hh6yasgcjncnlvdpqsu2durizsxhdmhsbrqekypf6aa", + "cid": "bafybeifwa5vgfvnrdwzqmojsxilwbg2k37axh2fs57zfmddz3l5yivn4la", "height": int64(3), }, { - "cid": "bafybeifq2bd3nkqa6q5tjb5lrmeoskimtchhodvcxdqeilck2x4k3z7ijq", + "cid": "bafybeifn2f5lgzall3dzva47khbtib77lt7ve5qyclou3ihi2hy2uqj4nm", "height": int64(4), }, { - "cid": 
"bafybeiakoro6m2bvfmtmczykyffvixx6ci7tbgczebhdwttx5ymh5c7wyy", + "cid": "bafybeieijpm36ntafrncl4kgx6dkxgpbftcl4f7obbbmagurcgdoj6sl5y", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_doc_id_test.go b/tests/integration/query/commits/with_doc_id_test.go index b57219df46..1524409663 100644 --- a/tests/integration/query/commits/with_doc_id_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -62,13 +62,13 @@ func TestQueryCommitsWithDocID(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -102,22 +102,22 @@ func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "links": []map[string]any{}, }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, @@ -158,23 +158,23 @@ func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "height": int64(2), }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "height": int64(1), }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "height": int64(1), }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "height": int64(2), }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "height": int64(1), }, }, @@ -219,44 +219,44 @@ func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "_head", }, }, }, { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, { - 
"cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "links": []map[string]any{}, }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", "links": []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "name": "_head", }, { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", "name": "age", }, }, }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_doc_id_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go index 17a1422d7b..51bc88a946 100644 --- a/tests/integration/query/commits/with_doc_id_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "__typename": "Commit", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "__typename": "Commit", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index bebd35b828..1ea35a8d96 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, }, }, @@ -98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -131,8 +131,8 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. 
}`, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", - "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index 414df231c2..1971e6f6dd 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeia7qkfbfm4jijlkqs6uxziie2v57nin5gaa3afnpkruw352mmrt4q", + "cid": "bafybeiaho26jaxdjfuvyxozws6ushksjwidllvgai6kgxmqxhzylwzkvte", }, { - "cid": "bafybeid4fh7ggr2wgema6b5hrqroimcso3vxyous3oyck5c66vm72br7z4", + "cid": "bafybeiep7c6ouykgidnwzjeasyim3ost5qjkro4qvs62t4u4u7rolbmugm", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "_group": []map[string]any{ { "height": int64(1), diff --git a/tests/integration/query/latest_commits/with_doc_id_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go index 4c7ed89f9c..0b886b966a 100644 --- a/tests/integration/query/latest_commits/with_doc_id_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "links": []map[string]any{}, }, }, @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": 
"bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go index 0ef237c300..089f6f5086 100644 --- a/tests/integration/query/latest_commits/with_doc_id_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocID(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", "links": []map[string]any{ { - "cid": "bafybeieoeoset5itv7alud2yzjmq6dqizymdwdmlvyxam2uxe4lfexooaq", + "cid": "bafybeietdefm4jtgcbpof5elqdptwnuevvzujb3j22n6dytx67vpmb3xn4", "name": "age", }, { - "cid": "bafybeih4plbb3rinhqvn663ssfwhnujdbnbjistymzowsry5nvmxchmqny", + "cid": "bafybeihc26puzzgnctvgvgowihytif52tmdj3y2ksx6tvge2wtjkjvtszi", "name": "name", }, }, @@ -75,8 +75,8 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeic2zvs2beirqmgd45myszkqwj32w3oyduolugkxv4gxxph4c4mzva", - "schemaVersionId": "bafkreibpk53kumv6q3kkc3hz2y57tnbgizpodk3vleyc3h5muv6vm4pdoe", + "cid": "bafybeiafufeqwjo5eeeaobwiu6ibf73dnvlefr47mrx45z42337mfnezkq", + "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, } diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index 843bf12638..f3f5ff580c 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -104,7 +104,7 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm" + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu" docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -179,7 +179,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm", + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -252,7 +252,7 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeidixr5nt7vb5go4nx675exjubb6g7sn2upltkfvf4piepgcd5ntjm", + cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { rating @@ -324,7 +324,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafybeicuhxlsrkonczjlrpj77xbg6fxgkncictecifxe7rdw4egxs72kse", + cid: "bafybeibqkdnc63xh5k4frs3x3k7z7p6sw4usjrhxd4iusbjj2uhxfjfjcq", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { rating diff --git a/tests/integration/query/one_to_many/with_id_field_test.go b/tests/integration/query/one_to_many/with_id_field_test.go index 8a16f1c49a..9f70b0d1b3 100644 --- a/tests/integration/query/one_to_many/with_id_field_test.go +++ b/tests/integration/query/one_to_many/with_id_field_test.go @@ -42,46 +42,7 @@ 
func TestQueryOneToManyWithIdFieldOnPrimary(t *testing.T) { published: [Book] } `, - }, - testUtils.CreateDoc{ - // bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed - CollectionID: 1, - Doc: `{ - "name": "John Grisham" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author_id": 123456 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "A Time for Mercy", - "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - }`, - ExpectedError: "value doesn't contain number; it contains string", - }, - testUtils.Request{ - Request: `query { - Book { - name - author_id - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author_id": int64(123456), - "author": nil, - }, - }, + ExpectedError: "relational id field of invalid kind. Field: author_id, Expected: ID, Actual: Int", }, }, } diff --git a/tests/integration/query/one_to_one/with_clashing_id_field_test.go b/tests/integration/query/one_to_one/with_clashing_id_field_test.go index f563f1e358..1dd97572ca 100644 --- a/tests/integration/query/one_to_one/with_clashing_id_field_test.go +++ b/tests/integration/query/one_to_one/with_clashing_id_field_test.go @@ -94,22 +94,7 @@ func TestQueryOneToOneWithClashingIdFieldOnPrimary(t *testing.T) { published: Book } `, - }, - testUtils.CreateDoc{ - // bae-d82dbe47-9df1-5e33-bd87-f92e9c378161 - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author_id": 123456 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John Grisham", - "published_id": "bae-d82dbe47-9df1-5e33-bd87-f92e9c378161" - }`, - ExpectedError: "target document is already linked to another document.", + ExpectedError: "relational id field of invalid kind. Field: author_id, Expected: ID, Actual: Int", }, }, } diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index 7c265a409c..6fe41d1aae 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -93,7 +93,7 @@ func TestQuerySimpleWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -135,7 +135,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -177,7 +177,7 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeicowz6vraybays3br77rm4yzkiykr6jlp3mmsbyqbkcvk2cdukdru", + cid: "bafybeibwxvtvppws6sjfoajazevrdh27g4qwn5wguslpabyl3kzxd2a6fm", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -224,7 +224,7 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeicowz6vraybays3br77rm4yzkiykr6jlp3mmsbyqbkcvk2cdukdru", + cid: "bafybeibwxvtvppws6sjfoajazevrdh27g4qwn5wguslpabyl3kzxd2a6fm", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -266,7 +266,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) testUtils.Request{ Request: `query { Users ( - cid: 
"bafybeidlsifvletowavkcihp2d4k62ayuznumttxsseqynatufwnahiste", + cid: "bafybeif757a4mdwimqwl24ujjnao6xlajiajz2hwuleopnptusuttri6zu", docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" ) { name @@ -280,7 +280,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + "schemaVersionId": "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, }, }, @@ -324,7 +324,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeihd4uju62lpqft3fheevde2cmcehty3zqkbpyp2zu2ehfwietcu5i", + cid: "bafybeicruxxfhxhyvefbxid7gukdbnfzkyad45phu4mnwzzqde24p32xnu", docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" ) { name @@ -376,7 +376,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeiecgpblwcvgs3lw66v2p7frvwwak4gg4754dax742lomfxfrrvb4i", + cid: "bafybeibeo7pmvzpkkanwd72q4qu3m4yxex3coufq7uogvcnjwgqzrlpco4", docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" ) { name @@ -423,7 +423,7 @@ func TestCidAndDocIDQuery_ContainsPCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeibinkgqwegghg7kqwk66etboc5jv42i4akasxrih35wrvykdwcima", + cid: "bafybeidtjhrssohan2f5nt7ml3nh4bovpaqhqjvijlpacfednyx77iw5y4", docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" ) { name @@ -470,7 +470,7 @@ func TestCidAndDocIDQuery_ContainsPCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeifsok5oy42zs2p7habfjr3ee3j7mxeag5nfdo7u4d2bfvm6hdhnpq", + cid: "bafybeieimeijjl4hdvqkt5gkn62j54nlnaetm4te7w4z2mdljlyphfsyji", docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" ) { name diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 08032dd694..615e75a293 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "links": []map[string]any{ { - "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, @@ -90,7 +90,7 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { "Name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "L1": []map[string]any{ { - "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", + "cid": 
"bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, @@ -242,7 +242,7 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -251,15 +251,15 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, - "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, @@ -321,7 +321,7 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafybeieywntwsejjuxxrwhlcudadsyc6xhy3pt6rcdhom3zvdewqhmncve", + "cid": "bafybeibpezk2dgdlyavsh3k7vbmgh3iwanqhkzo4byafgytjdv5c7xy73u", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -330,18 +330,18 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafybeibb6sup35cb4tjrgetjqkqshg3r56vk5up7ruz3rddqklttnk7yfi", + "cid": "bafybeihidcg4gkm6bnlyyghr5cq5dkn6x5a4l347amy7odsy5rkd7eu4qu", "name": "Age", }, { - "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "name": "_head", }, }, - "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, { - "cid": "bafybeiaar7e2rama55djgnt5z2myspcmse4cfcwujo5z726qxpkp5af5z4", + "cid": "bafybeigxe467aute545c52e27ll3yun7rpkledh5tbjhxxs2i76dzkfdom", "collectionID": int64(1), "delta": nil, "docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", @@ -350,15 +350,15 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafybeibdnm4rrtu5upewruipxb5zcvytgjfhvhnvobifkyrsddyacdboxy", + "cid": "bafybeigdvbqfwrm6dxfnfv4srbue5agzpyzoifl77ix6df7k5pjhat3fwu", "name": "Age", }, { - "cid": "bafybeiekpxtt3nuqygah2dta3ztauifvx6dbw3sjrl6hi76tkxrjfzcste", + "cid": "bafybeicurnibuf3b6krgqm3sh2ohmvxiodvawagx2evod573z67xf54zxu", "name": "Name", }, }, - "schemaVersionId": "bafkreics522ai3tdep2trfeesb6csl5wqul4dexhhueha6b2xarmcctyoa", + "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, }, }, diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go index fdc278e52c..2a321ef751 100644 --- a/tests/integration/schema/crdt_type_test.go +++ b/tests/integration/schema/crdt_type_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { - schemaVersionID := 
"bafkreihg7aweuwitzdtturuipps2rxw774o5iu36ovxqawdncxa4yibpsq" + schemaVersionID := "bafkreigsnu67poxm3663e7vl5cncl6pxdzndcc7jf66cnnvxzw5uko5iuu" test := testUtils.TestCase{ Actions: []any{ @@ -59,7 +59,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { } func TestSchemaCreate_ContainsPNCounterTypeWithFloatKind_NoError(t *testing.T) { - schemaVersionID := "bafkreig7olui76coe4nmm6s7f6lza7d7i35rurktxhcbmrs4po7plcrnvu" + schemaVersionID := "bafkreieflo3tkhsywsqcyzoj6nqgxc6ovv5m5lc7bfbum6yqls5rxlwkye" test := testUtils.TestCase{ Actions: []any{ @@ -132,7 +132,7 @@ func TestSchemaCreate_ContainsPNCounterWithInvalidType_Error(t *testing.T) { } func TestSchemaCreate_ContainsPCounterTypeWithIntKind_NoError(t *testing.T) { - schemaVersionID := "bafkreidjvjnvtwwdkcdqwcmwxqzu3bxrbxs3rkn6h6h7kkxmibpli3mp7y" + schemaVersionID := "bafkreigbmy67fjsys3li5rbs64k3vezvdtbfryc67pxiju4nis7lrbanea" test := testUtils.TestCase{ Actions: []any{ @@ -171,7 +171,7 @@ func TestSchemaCreate_ContainsPCounterTypeWithIntKind_NoError(t *testing.T) { } func TestSchemaCreate_ContainsPCounterTypeWithFloatKind_NoError(t *testing.T) { - schemaVersionID := "bafkreiasm64v2oimv6uk3hlfap6awptumwkm4fxuoc3ck3ehfe2tmry66i" + schemaVersionID := "bafkreifcyba45ov5zqi6dbhlu72rmf4wp3crjynjvvpq6iuauns2ofbvzi" test := testUtils.TestCase{ Actions: []any{ diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index 9f7d3bea3c..a89f4a2eb9 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -71,9 +71,9 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { } func TestGetSchema_ReturnsAllSchema(t *testing.T) { - usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" - usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" - booksSchemaVersion1ID := "bafkreicwmtpmea4gis6lkt46l5evd2xhais36qd5egb2b7mjrqnojbtzja" + usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" + booksSchemaVersion1ID := "bafkreibiu34zrehpq346pwp5z24qkderm7ibhnpcqalhkivhnf5e2afqoy" test := testUtils.TestCase{ Actions: []any{ @@ -145,8 +145,8 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { - usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" - usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" + usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" test := testUtils.TestCase{ Actions: []any{ @@ -208,8 +208,8 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { - usersSchemaVersion1ID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" - usersSchemaVersion2ID := "bafkreibuxh4vi3xsob5vx22bn3i5osbkxtimdl2nrs74cqxuf2w3ys2f3y" + usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" + usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" test := testUtils.TestCase{ Actions: []any{ diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index 150e70a0a4..a588e70e87 100644 
--- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQuery(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -115,8 +115,8 @@ func TestSchemaMigrationQueryMultipleDocs(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -178,8 +178,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,8 +254,8 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // Register a migration from schema version 1 to schema version 2 **only** - // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,8 +325,8 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", - DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -394,8 +394,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,8 +411,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", - DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -466,8 +466,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -483,8 +483,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", - DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -553,8 +553,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd testUtils.ConfigureMigration{ // Declare the migration from v2=>v3 before declaring the migration from v1=>v2 LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", - DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -570,8 +570,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - 
SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -712,8 +712,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -773,8 +773,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreicm3axeowuuorrvlpvzatvnsaa6224qt7erlzjjhevwkndn532pxe", - DestinationSchemaVersionID: "bafkreih4urgndwhrvjoruj55yv5n3luvvky4daq67ivahiici7yn35mkfu", + SourceSchemaVersionID: "bafkreicn6ltdovb6y7g3ecoptqkvx2y5y5yntrb5uydmg3jiakskqva2ta", + DestinationSchemaVersionID: "bafkreifv4vhz3dw7upc5u3omsqi6klz3h3e54ogfskp72gtut62fuxqrcu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -836,8 +836,8 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", - DestinationSchemaVersionID: "bafkreig5ovmx3vbhskpazxzjvlezy4brrndxu7bhdn5z2iqnozvw5iliwu", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -897,8 +897,8 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", - DestinationSchemaVersionID: "bafkreig5ovmx3vbhskpazxzjvlezy4brrndxu7bhdn5z2iqnozvw5iliwu", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -971,8 +971,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", - DestinationSchemaVersionID: "bafkreihmw2xtrfccga6dy2nsh2sqwnzmbsygm5xkoltf4v3u4vdrinliki", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -1033,8 +1033,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreibthhctfd3rykinfa6ivvkhegp7sbhk5yvujdkhase7ilj5dz5gqi", - 
DestinationSchemaVersionID: "bafkreihmw2xtrfccga6dy2nsh2sqwnzmbsygm5xkoltf4v3u4vdrinliki", + SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_doc_id_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go index 70bf0040e3..ee175515dc 100644 --- a/tests/integration/schema/migrations/query/with_doc_id_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -52,8 +52,8 @@ func TestSchemaMigrationQueryByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -158,8 +158,8 @@ func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_inverse_test.go b/tests/integration/schema/migrations/query/with_inverse_test.go index 2375fbb373..f436c332c0 100644 --- a/tests/integration/schema/migrations/query/with_inverse_test.go +++ b/tests/integration/schema/migrations/query/with_inverse_test.go @@ -49,8 +49,8 @@ func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreieabpdpv5ua4f6lc5lprud4vvbefmfinzqaewhx5gzuf7anwgrqmy", - DestinationSchemaVersionID: "bafkreid2g456hvlkedusgfp6argh76a74ymrlii2ag4yqqsn2sgt4pkslu", + SourceSchemaVersionID: "bafkreicdkt3m6mgwuoix7qyijvwxwtj3dlre4a4c6mdnqbucbndwuxjsvi", + DestinationSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -66,8 +66,8 @@ func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreid2g456hvlkedusgfp6argh76a74ymrlii2ag4yqqsn2sgt4pkslu", - DestinationSchemaVersionID: "bafkreibwswh2pxloduldc2l5h5jzm7b6fqt3s4vijq3nssmn3rr5gws2ki", + SourceSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", + DestinationSchemaVersionID: "bafkreickm4zodm2muw5qcctmssht63g57u7kxujqyoax4zb5c42zs4pdh4", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -89,7 +89,7 @@ func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { }`, }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreieabpdpv5ua4f6lc5lprud4vvbefmfinzqaewhx5gzuf7anwgrqmy", + SchemaVersionID: "bafkreicdkt3m6mgwuoix7qyijvwxwtj3dlre4a4c6mdnqbucbndwuxjsvi", }, testUtils.Request{ Request: `query { diff --git 
a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go index 9aba1698e1..ca8de37f95 100644 --- a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocOnOtherSchemaBranch(t *testing. testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", - DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index 4e9bee6828..f8b0197d5d 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", - DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -145,8 +145,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", - DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -163,8 +163,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", - DestinationSchemaVersionID: "bafkreihxxnewvatrejbay6uwon5pcxxh2427txtq3ozwc5qybc2hwyn4s4", + SourceSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreidiohu3klvu4f2fdqcywtpqild4v7spsn7ivsjtg6sea6ome2oc4i", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -253,8 +253,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy", - DestinationSchemaVersionID: "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq", + SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", + DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -355,8 +355,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", - DestinationSchemaVersionID: "bafkreiahtlb4wv2zrnezvlwyxwtk7a2gexhrcjbnzd3hf4ejsdgatjybey", + SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index 7d7525b910..f44264312c 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -99,8 +99,8 @@ func TestSchemaMigrationQueryWithRestartAndMigrationBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_schema_branch_test.go b/tests/integration/schema/migrations/query/with_schema_branch_test.go index fe882944ee..0ed9e68aca 100644 --- a/tests/integration/schema/migrations/query/with_schema_branch_test.go +++ b/tests/integration/schema/migrations/query/with_schema_branch_test.go @@ -21,7 +21,7 @@ import ( ) func TestSchemaMigrationQuery_WithBranchingSchema(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" test := testUtils.TestCase{ Description: "Test schema update, with branching schema migrations", diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index 8501d68a8e..17c147338c 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ 
b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( ) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -83,8 +83,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * } func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { - schemaVersionID1 := "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy" - schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" + schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -158,8 +158,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t } func TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { - schemaVersionID1 := "bafkreihax57fohcdupqr2l4heoqxdsiggjfeaubr44tgrz4xqdgvnid4xy" - schemaVersionID2 := "bafkreifer354qmdrwdtae5n3k7sbl2oauis3mz24fk46p3otub7ojznobq" + schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" + schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index f22d4bcbc4..880f9e01ed 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -47,8 +47,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -109,8 +109,8 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index b3ddd94e77..93a2586e25 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: 
client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -129,8 +129,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index 6b7767943a..a7826f5366 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -106,8 +106,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", - DestinationSchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", + DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -154,11 +154,11 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, { ID: 3, - SchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + SchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, { ID: 4, - SchemaVersionID: "bafkreifpgr7zjwxmrjpo3rtybd2kqye6mmf5copqwzv27a5fgpvbq4aqm4", + SchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", Sources: []any{ &client.CollectionSource{ SourceCollectionID: 3, diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go index e14792f75e..b5bc75bb48 100644 --- a/tests/integration/schema/one_one_test.go +++ b/tests/integration/schema/one_one_test.go @@ -30,7 +30,7 @@ func TestSchemaOneOne_NoPrimary_Errors(t *testing.T) { owner: User } `, - ExpectedError: "primary side of relation not defined. RelationName: dog_user", + ExpectedError: "relation missing field. Object: Dog, RelationName: dog_user", }, }, } diff --git a/tests/integration/schema/relations_test.go b/tests/integration/schema/relations_test.go index ade67c689a..892c6e67ac 100644 --- a/tests/integration/schema/relations_test.go +++ b/tests/integration/schema/relations_test.go @@ -135,45 +135,7 @@ func TestSchemaRelationErrorsGivenOneSidedManyRelationField(t *testing.T) { dogs: [Dog] } `, - ExpectedError: "relation must be defined on both schemas. 
Field: dogs, Type: Dog", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelationErrorsGivenOneSidedRelationField(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - type User { - dog: Dog - } - `, - ExpectedError: "relation must be defined on both schemas. Field: dog, Type: Dog", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelation_GivenSelfReferemceRelationField_ReturnError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - bestMate: Dog - } - `, - ExpectedError: "relation must be defined on both schemas. Field: bestMate, Type: Dog", + ExpectedError: "relation missing field. Object: Dog, RelationName: dog_user", }, }, } diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index 854321a170..9e169e6178 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { - schemaVersionID := "bafkreiaopue5oiqzbszdk265wl6lqkqc44glt2tgjncbwek447slainu7m" + schemaVersionID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" test := testUtils.TestCase{ Actions: []any{ @@ -180,7 +180,7 @@ func TestSchemaSimpleErrorsGivenTypeWithInvalidFieldType(t *testing.T) { name: NotAType } `, - ExpectedError: "relation must be defined on both schemas. Field: name, Type: NotAType", + ExpectedError: "no type found for given name. Field: name, Kind: NotAType", }, }, } diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 6ce10243c0..d299b70e7f 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -17,8 +17,8 @@ import ( ) func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { - initialSchemaVersionId := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - updatedSchemaVersionId := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + initialSchemaVersionId := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + updatedSchemaVersionId := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -105,8 +105,8 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi } func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { - initialSchemaVersionId := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - updatedSchemaVersionId := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + initialSchemaVersionId := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + updatedSchemaVersionId := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index 4b1247718d..abeff648fd 100644 --- 
a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 Democratized Data Foundation +// Copyright 2024 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -11,495 +11,14 @@ package kind import ( - "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { +func TestSchemaUpdatesAddFieldKindForeignObjectArray_UnknownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 17} } - ] - `, - ExpectedError: "no type found for given name. Type: 17", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "[Users]" - }} - ] - `, - ExpectedError: "missing relation name. Field: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingKind(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), id field missing kind", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } - ] - `, - ExpectedError: "relational id field of invalid kind. Field: foo_id, Expected: ID, Actual: 0", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, id field invalid kind", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } - ] - `, - ExpectedError: "relational id field of invalid kind. 
Field: foo_id, Expected: ID, Actual: Boolean", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, id field missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } - ] - `, - ExpectedError: "missing relation name. Field: foo_id", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, only half relation defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "relation must be defined on both schemas. Field: foo, Type: Users", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - ExpectedError: "primary side of relation not defined. RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryDefinedOnManySide(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "IsPrimaryRelation": true, "RelationName": "foo" - }} - ] - `, - ExpectedError: "cannot set the many side of a relation as primary. 
Field: foobar", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, valid, functional", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.Request{ - Request: `mutation { - create_Users(input: {name: "John"}) { - _docID - } - }`, - Results: []map[string]any{ - { - "_docID": key1, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf(`mutation { - create_Users(input: {name: "Keenan", foo: "%s"}) { - name - foo { - name - } - } - }`, - key1, - ), - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SinglePrimaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, with single object Kind substitution", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type 
Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo_id": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array, with object Kind substitution", + Description: "Test schema update, add field with kind foreign object array, unknown schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -512,73 +31,20 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *t Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" + "Name": "foo", "Kind": "[Unknown]" }} ] `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, + ExpectedError: "no type found for given name. 
Field: foo, Kind: Unknown", }, }, } testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - +func TestSchemaUpdatesAddFieldKindForeignObjectArray_KnownSchema(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with object Kind substitution", + Description: "Test schema update, add field with kind foreign object array, known schema", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -591,177 +57,11 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitutionWithA Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo_id - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo_id": key1, - "foo": map[string]any{ - "name": "John", - }, - "foobar": []map[string]any{}, - }, - { - "name": "John", - "foo": nil, - "foo_id": nil, - "foobar": []map[string]any{ - { - "name": "Keenan", - }, - }, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField_DoesNotCreateIdOnManySide(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": 
{ - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "[Users]", "RelationName": "foo" + "Name": "foo", "Kind": "[Users]" }} ] `, - }, - testUtils.Request{ - Request: `query { - Users { - foobar_id - } - }`, - ExpectedError: `Cannot query field "foobar_id" on type "Users"`, + ExpectedError: "secondary relation fields cannot be defined on the schema. Name: foo", }, }, } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index af51ec335d..56bfbd2131 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -67,32 +67,6 @@ func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object, missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users" - }} - ] - `, - ExpectedError: "missing relation name. Field: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) { test := testUtils.TestCase{ Description: "Test schema update, add field with kind foreign object, id field missing kind", @@ -108,7 +82,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users","IsPrimaryRelation": true, "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id"} } ] @@ -135,7 +109,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } ] @@ -147,129 +121,6 @@ func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object, id field missing relation name", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } - ] - `, - ExpectedError: "missing relation name. 
Field: foo_id", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object, only half relation defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "relation must be defined on both schemas. Field: foo, Type: Users", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object, no primary defined", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }} - ] - `, - ExpectedError: "primary side of relation not defined. RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_BothSidesPrimary(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object, both sides primary", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - ExpectedError: "both sides of a relation cannot be primary. 
RelationName: foo", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" @@ -287,16 +138,10 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { Patch: ` [ { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" + "Name": "foo", "Kind": "Users" }}, { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" + "Name": "foo_id", "Kind": 1 }} ] `, @@ -340,9 +185,6 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { foo { name } - foobar { - name - } } }`, Results: []map[string]any{ @@ -351,14 +193,10 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { "foo": map[string]any{ "name": "John", }, - "foobar": nil, }, { "name": "John", "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, }, }, }, @@ -366,159 +204,3 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { } testUtils.ExecuteTestCase(t, test) } - -func TestSchemaUpdatesAddFieldKindForeignObject_MissingPrimaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with auto primary ID field creation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foobar_id", "Kind": 1, "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaUpdatesAddFieldKindForeignObject_MissingSecondaryIDField(t *testing.T) { - key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" - - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind foreign object (16), with auto secondary ID field creation", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo", "Kind": "Users", "IsPrimaryRelation": true, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", "value": { - "Name": "foo_id", "Kind": 1, "RelationName": "foo" - }}, - { "op": "add", "path": "/Users/Fields/-", 
"value": { - "Name": "foobar", "Kind": "Users", "RelationName": "foo" - }} - ] - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf(`{ - "name": "Keenan", - "foo": "%s" - }`, - key1, - ), - }, - testUtils.Request{ - Request: `query { - Users { - name - foo { - name - } - foobar { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Keenan", - "foo": map[string]any{ - "name": "John", - }, - "foobar": nil, - }, - { - "name": "John", - "foo": nil, - "foobar": map[string]any{ - "name": "Keenan", - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index 45a9b6afd5..80aaec32d6 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -20,8 +20,8 @@ import ( ) func TestSchemaUpdatesAddFieldSimple(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -115,8 +115,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { } func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" test := testUtils.TestCase{ Description: "Test schema update, add field", diff --git a/tests/integration/schema/updates/move/simple_test.go b/tests/integration/schema/updates/move/simple_test.go index 2e33c709d6..9898430e0f 100644 --- a/tests/integration/schema/updates/move/simple_test.go +++ b/tests/integration/schema/updates/move/simple_test.go @@ -17,7 +17,7 @@ import ( ) func TestSchemaUpdatesMoveCollectionDoesNothing(t *testing.T) { - schemaVersionID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersionID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" test := testUtils.TestCase{ Description: "Test schema update, move collection", diff --git a/tests/integration/schema/updates/remove/fields/simple_test.go b/tests/integration/schema/updates/remove/fields/simple_test.go index fae9b85dc7..ce9b8112f0 100644 --- a/tests/integration/schema/updates/remove/fields/simple_test.go +++ b/tests/integration/schema/updates/remove/fields/simple_test.go @@ -140,32 +140,3 @@ func TestSchemaUpdatesRemoveFieldTypErrors(t *testing.T) { } testUtils.ExecuteTestCase(t, test) } - -func TestSchemaUpdatesRemoveFieldRelationNameErrors(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, remove field RelationName", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Author { - name: String - book: [Book] - } - type Book { - name: String - author: Author - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "remove", "path": "/Author/Fields/1/RelationName" } - ] - `, - 
ExpectedError: "mutating an existing field is not supported. ProposedName: book", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/schema/updates/test/field/simple_test.go b/tests/integration/schema/updates/test/field/simple_test.go index effdb162fe..35cba3ec29 100644 --- a/tests/integration/schema/updates/test/field/simple_test.go +++ b/tests/integration/schema/updates/test/field/simple_test.go @@ -102,7 +102,7 @@ func TestSchemaUpdatesTestFieldPasses(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/1", "value": { - "Name": "name", "Kind": 11, "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Name": "name", "Kind": 11, "Typ":1 } } ] `, @@ -127,7 +127,7 @@ func TestSchemaUpdatesTestFieldPasses_UsingFieldNameAsIndex(t *testing.T) { Patch: ` [ { "op": "test", "path": "/Users/Fields/name", "value": { - "Kind": 11, "IsPrimaryRelation":false, "RelationName":"", "Typ":1 + "Kind": 11, "Typ":1 } } ] `, diff --git a/tests/integration/schema/updates/with_schema_branch_test.go b/tests/integration/schema/updates/with_schema_branch_test.go index a47a5f4bb4..d8f7d1afc2 100644 --- a/tests/integration/schema/updates/with_schema_branch_test.go +++ b/tests/integration/schema/updates/with_schema_branch_test.go @@ -20,9 +20,9 @@ import ( ) func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" - schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" test := testUtils.TestCase{ Description: "Test schema update, with branching schema", @@ -169,10 +169,10 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { } func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" - schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" - schemaVersion4ID := "bafkreidqp7ha7mfhwqpahevcpsn5etmi3soawyq76oytdxlyozvs6cgyui" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion4ID := "bafkreid4ulxeclzgpzhznge7zdin6docxvklugvr6gt4jxfyanz5i2r2hu" test := testUtils.TestCase{ Description: "Test schema update, with patch on branching schema", @@ -307,9 +307,9 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" - schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" test := 
testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches", @@ -403,10 +403,10 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes } func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPatch(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" - schemaVersion2ID := "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply" - schemaVersion3ID := "bafkreiawvcmcwounww6dbzb2vlvvstqf7venmktd4tsgxkw4o4undmtipe" - schemaVersion4ID := "bafkreih5trmbzpjdgterha2amx2n6opgwlpvdyxfeyfi2uq7ncbodpl2cu" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" + schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion4ID := "bafkreidjuyxhakc5yx7fucunoxijnfjvgqohf4sjoryzf27mqxidh37kne" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches then patch", @@ -545,7 +545,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat } func TestSchemaUpdates_WithBranchingSchemaAndGetCollectionAtVersion(t *testing.T) { - schemaVersion1ID := "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a" + schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" test := testUtils.TestCase{ Description: `Test schema update, with branching schema toggling between branches and gets the diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index 9203a61655..f46e0540e3 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -92,7 +92,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToOriginal_NewFieldIsNotQueriable( SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreiht46o4lakri2py2zw57ed3pdeib6ud6ojlsomgjlrgwh53wl3q4a", + SchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, testUtils.Request{ Request: `query { @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreigdplzukezgpmjs45lw6kwzhtwge4xjzfgm6iodcd32d7kdageply", + SchemaVersionID: "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index b699ed9f7f..2e36bfecf1 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -841,7 +841,7 @@ func refreshDocuments( // We need to add the existing documents in the order in which the test case lists them // otherwise they cannot be referenced correctly by other actions. 
- doc, err := client.NewDocFromJSON([]byte(action.Doc), collection.Schema()) + doc, err := client.NewDocFromJSON([]byte(action.Doc), collection.Definition()) if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway @@ -1195,7 +1195,7 @@ func createDocViaColSave( collections []client.Collection, ) (*client.Document, error) { var err error - doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema()) + doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Definition()) if err != nil { return nil, err } @@ -1215,7 +1215,7 @@ func createDocViaColCreate( collections []client.Collection, ) (*client.Document, error) { var err error - doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema()) + doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Definition()) if err != nil { return nil, err } diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index f6ccd699b8..30f76987a2 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -122,7 +122,7 @@ func TestView_OneToManyWithMixedSDL_Errors(t *testing.T) { books: [Book] } `, - ExpectedError: "relation must be defined on both schemas. Field: books, Type: Book", + ExpectedError: "relation missing field. Object: Book, RelationName: authorview_book", }, }, } @@ -457,46 +457,3 @@ func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } - -func TestView_OneToManyViewOfView(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to many view of view", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Author { - name: String - books: [Book] - } - type Book { - name: String - author: Author - } - `, - }, - testUtils.CreateView{ - Query: ` - Author { - name - books { - name - } - } - `, - SDL: ` - type AuthorView { - name: String - books: [BookView] - } - interface BookView { - name: String - author: AuthorView - } - `, - ExpectedError: "relations in views must only be defined on one schema", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index d83d1594fd..34d575098e 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -109,7 +109,7 @@ type docGenerator struct { // It doesn't not modify the original doc. 
func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) map[string]any { result := make(map[string]any) - for _, field := range typeDef.Schema.Fields { + for _, field := range typeDef.GetFields() { if field.IsRelation() || field.Name == request.DocIDFieldName { continue } @@ -131,7 +131,7 @@ func (this *docGenerator) generatePrimary( ) (map[string]any, []gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} requestedSecondary := toRequestedDoc(secDocMap, secType) - for _, secDocField := range secType.Schema.Fields { + for _, secDocField := range secType.GetFields() { if secDocField.IsRelation() { if secDocMapField, hasField := secDocMap[secDocField.Name]; hasField { if secDocField.IsPrimaryRelation { @@ -141,7 +141,7 @@ func (this *docGenerator) generatePrimary( if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } - primDoc, err := client.NewDocFromMap(primDocMap, primType.Schema) + primDoc, err := client.NewDocFromMap(primDocMap, primType) if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } @@ -174,7 +174,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st if err != nil { return nil, err } - doc, err := client.NewDocFromMap(requested, typeDef.Schema) + doc, err := client.NewDocFromMap(requested, typeDef) if err != nil { return nil, NewErrFailedToGenerateDoc(err) } @@ -196,7 +196,7 @@ func (this *docGenerator) generateSecondaryDocs( parentTypeName string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} - for _, field := range primaryType.Schema.Fields { + for _, field := range primaryType.GetFields() { if field.IsRelation() { if _, hasProp := primaryDocMap[field.Name]; hasProp { if !field.IsPrimaryRelation && @@ -218,13 +218,13 @@ func (this *docGenerator) generateSecondaryDocs( func (this *docGenerator) generateSecondaryDocsForField( primaryDoc map[string]any, primaryTypeName string, - relField *client.SchemaFieldDescription, + relField *client.FieldDefinition, primaryDocID string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} relTypeDef := this.types[relField.Kind.Underlying()] primaryPropName := "" - for _, relDocField := range relTypeDef.Schema.Fields { + for _, relDocField := range relTypeDef.GetFields() { if relDocField.Kind.Underlying() == primaryTypeName && relDocField.IsPrimaryRelation { primaryPropName = relDocField.Name + request.RelatedObjectID switch relVal := primaryDoc[relField.Name].(type) { diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index 94b261059e..30cd446697 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -39,7 +39,7 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { colDefMap, err := parseSDL(schema) require.NoError(t, err) - errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"].Schema), docs) + errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"]), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -66,7 +66,7 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - }, colDefMap["User"].Schema), docs) + }, colDefMap["User"]), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -108,18 +108,18 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { userDocs := mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - }, colDefMap["User"].Schema) + }, 
colDefMap["User"]) deviceDocs := mustAddDocIDsToDocs([]map[string]any{ { "model": "iPhone", - "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"].Schema), + "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"]), }, { "model": "MacBook", - "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"].Schema), + "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"]), }, - }, colDefMap["Device"].Schema) + }, colDefMap["Device"]) errorMsg := assertDocs(append(userDocs, deviceDocs...), docs) if errorMsg != "" { @@ -163,17 +163,17 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { userDocs := mustAddDocIDsToDocs([]map[string]any{ { "name": "John", - "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema), + "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"}, colDefMap["Device"]), }, { "name": "Fred", - "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"}, colDefMap["Device"].Schema), + "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"}, colDefMap["Device"]), }, - }, colDefMap["User"].Schema) + }, colDefMap["User"]) deviceDocs := mustAddDocIDsToDocs([]map[string]any{ {"model": "iPhone"}, {"model": "MacBook"}, - }, colDefMap["Device"].Schema) + }, colDefMap["Device"]) errorMsg := assertDocs(append(userDocs, deviceDocs...), docs) if errorMsg != "" { @@ -216,15 +216,15 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { colDefMap, err := parseSDL(schema) require.NoError(t, err) - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"]) deviceDoc := mustAddDocIDToDoc(map[string]any{ "model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName], - }, colDefMap["Device"].Schema) + }, colDefMap["Device"]) userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", "device_id": deviceDoc[request.DocIDFieldName], - }, colDefMap["User"].Schema) + }, colDefMap["User"]) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -267,13 +267,13 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { colDefMap, err := parseSDL(schema) require.NoError(t, err) - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema) - userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"].Schema) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"]) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"]) deviceDoc := mustAddDocIDToDoc(map[string]any{ "model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName], "owner_id": userDoc[request.DocIDFieldName], - }, colDefMap["Device"].Schema) + }, colDefMap["Device"]) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -316,15 +316,15 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { colDefMap, err := parseSDL(schema) require.NoError(t, err) - deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"]) specsDoc := mustAddDocIDToDoc(map[string]any{ "OS": "iOS", "device_id": deviceDoc[request.DocIDFieldName], - }, colDefMap["Specs"].Schema) + }, 
colDefMap["Specs"]) userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", "device_id": deviceDoc[request.DocIDFieldName], - }, colDefMap["User"].Schema) + }, colDefMap["User"]) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -367,13 +367,13 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { colDefMap, err := parseSDL(schema) require.NoError(t, err) - deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema) - addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}, colDefMap["Address"].Schema) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"]) + addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}, colDefMap["Address"]) userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", "device_id": deviceDoc[request.DocIDFieldName], "address_id": addressDoc[request.DocIDFieldName], - }, colDefMap["User"].Schema) + }, colDefMap["User"]) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs) if errorMsg != "" { diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go index f155062503..0160470b53 100644 --- a/tests/predefined/util_test.go +++ b/tests/predefined/util_test.go @@ -68,22 +68,22 @@ outer: return "" } -func mustGetDocIDFromDocMap(docMap map[string]any, sd client.SchemaDescription) string { - doc, err := client.NewDocFromMap(docMap, sd) +func mustGetDocIDFromDocMap(docMap map[string]any, collectionDefinition client.CollectionDefinition) string { + doc, err := client.NewDocFromMap(docMap, collectionDefinition) if err != nil { panic("can not get doc from map" + err.Error()) } return doc.ID().String() } -func mustAddDocIDToDoc(doc map[string]any, sd client.SchemaDescription) map[string]any { - doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc, sd) +func mustAddDocIDToDoc(doc map[string]any, collectionDefinition client.CollectionDefinition) map[string]any { + doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc, collectionDefinition) return doc } -func mustAddDocIDsToDocs(docs []map[string]any, sd client.SchemaDescription) []map[string]any { +func mustAddDocIDsToDocs(docs []map[string]any, collectionDefinition client.CollectionDefinition) []map[string]any { for i := range docs { - mustAddDocIDToDoc(docs[i], sd) + mustAddDocIDToDoc(docs[i], collectionDefinition) } return docs } From f36a43f712418af65d3c0a92c9ef5b975622c2ed Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 19 Apr 2024 16:50:40 -0400 Subject: [PATCH 33/49] test(i): Assert all expected props are returned in tests (#2535) ## Relevant issue(s) Resolves #2435 ## Description Asserts that all expected properties are returned in integration test queries. 
--- tests/integration/backup/one_to_one/import_test.go | 3 ++- .../state/simple/peer/with_create_add_field_test.go | 1 + .../query/one_to_many/with_cid_doc_id_test.go | 2 ++ .../one_to_many/with_group_related_id_alias_test.go | 1 + .../query/one_to_many/with_group_related_id_test.go | 1 + tests/integration/query/simple/with_order_test.go | 1 + .../migrations/query/with_p2p_schema_branch_test.go | 1 + tests/integration/utils2.go | 12 ++++++++++++ tests/integration/view/simple/with_filter_test.go | 1 + 9 files changed, 22 insertions(+), 1 deletion(-) diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index d7ca39ea55..8c3aff4fe2 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -237,7 +237,8 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }, }, { - "name": "Game of chains", + "name": "Game of chains", + "author": nil, }, }, }, diff --git a/tests/integration/net/state/simple/peer/with_create_add_field_test.go b/tests/integration/net/state/simple/peer/with_create_add_field_test.go index 46ad3c5a9c..22133c78af 100644 --- a/tests/integration/net/state/simple/peer/with_create_add_field_test.go +++ b/tests/integration/net/state/simple/peer/with_create_add_field_test.go @@ -284,6 +284,7 @@ func TestP2PPeerCreateWithNewFieldDocSyncedBeforeReceivingNodeSchemaUpdatedDoesN { "Name": "John", // The email should be returned but it is not + "Email": nil, }, }, }, diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index f3f5ff580c..6b896ca6ed 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -255,6 +255,7 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { cid: "bafybeia3qbhebdwssoe5udinpbdj4pntb5wjr77ql7ptzq32howbaxz2cu", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { + name rating author { name @@ -327,6 +328,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { cid: "bafybeibqkdnc63xh5k4frs3x3k7z7p6sw4usjrhxd4iusbjj2uhxfjfjcq", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { + name rating author { name diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 9f17d2ffe7..bef01aee48 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -24,6 +24,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t Request: `query { Book(groupBy: [author]) { + author_id _group { name rating diff --git a/tests/integration/query/one_to_many/with_group_related_id_test.go b/tests/integration/query/one_to_many/with_group_related_id_test.go index 6b6b6f331f..4eec467480 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_test.go @@ -22,6 +22,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T Description: "One-to-many query with groupBy on related id (from many side).", Request: `query { Book(groupBy: [author_id]) { + author_id _group { name rating diff --git a/tests/integration/query/simple/with_order_test.go 
b/tests/integration/query/simple/with_order_test.go index f66241d944..1a1f966e60 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -22,6 +22,7 @@ func TestQuerySimpleWithEmptyOrder(t *testing.T) { Request: `query { Users(order: {}) { Name + Age } }`, Docs: map[int][]string{ diff --git a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go index ca8de37f95..b5e7bdde03 100644 --- a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go @@ -117,6 +117,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocOnOtherSchemaBranch(t *testing. Users { name phone + verified } } `, diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 2e36bfecf1..0a5afada24 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1811,6 +1811,18 @@ func assertRequestResults( for docIndex, result := range resultantData { expectedResult := expectedResults[docIndex] + + require.Equal( + s.t, + len(expectedResult), + len(result), + fmt.Sprintf( + "%s \n(number of properties for item at index %v don't match)", + s.testCase.Description, + docIndex, + ), + ) + for field, actualValue := range result { expectedValue := expectedResult[field] diff --git a/tests/integration/view/simple/with_filter_test.go b/tests/integration/view/simple/with_filter_test.go index 07b0e130ed..a600a84729 100644 --- a/tests/integration/view/simple/with_filter_test.go +++ b/tests/integration/view/simple/with_filter_test.go @@ -118,6 +118,7 @@ func TestView_SimpleWithFilterOnViewAndQuery(t *testing.T) { query { UserView(filter: {age: {_eq: 31}}) { name + age } } `, From 39286f11fb307f49d90e41d3d7cc4d4a8957fef1 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 19 Apr 2024 17:41:35 -0700 Subject: [PATCH 34/49] fix: Make all array kinds nillable (#2534) ## Relevant issue(s) Resolves #2533 ## Description This PR fixes a bug where `ScalarArrayKind.IsNillable()` returned an incorrect value for arrays of non-nillable values. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
`make test` Specify the platform(s) on which this was tested: - MacOS --- client/normal_nil.go | 16 ++++++++++++---- client/normal_value_test.go | 25 +++++++++++++++++++++++++ client/schema_field_description.go | 5 +---- 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/client/normal_nil.go b/client/normal_nil.go index 7513fa9979..7cd2df3f16 100644 --- a/client/normal_nil.go +++ b/client/normal_nil.go @@ -34,14 +34,22 @@ func NewNormalNil(kind FieldKind) (NormalValue, error) { return NewNormalNillableString(immutable.None[string]()), nil case FieldKind_NILLABLE_BLOB: return NewNormalNillableBytes(immutable.None[[]byte]()), nil - case FieldKind_NILLABLE_BOOL_ARRAY: + case FieldKind_BOOL_ARRAY: return NewNormalBoolNillableArray(immutable.None[[]bool]()), nil - case FieldKind_NILLABLE_INT_ARRAY: + case FieldKind_INT_ARRAY: return NewNormalIntNillableArray(immutable.None[[]int64]()), nil - case FieldKind_NILLABLE_FLOAT_ARRAY: + case FieldKind_FLOAT_ARRAY: return NewNormalFloatNillableArray(immutable.None[[]float64]()), nil - case FieldKind_NILLABLE_STRING_ARRAY: + case FieldKind_STRING_ARRAY: return NewNormalStringNillableArray(immutable.None[[]string]()), nil + case FieldKind_NILLABLE_BOOL_ARRAY: + return NewNormalNillableBoolNillableArray(immutable.None[[]immutable.Option[bool]]()), nil + case FieldKind_NILLABLE_INT_ARRAY: + return NewNormalNillableIntNillableArray(immutable.None[[]immutable.Option[int]]()), nil + case FieldKind_NILLABLE_FLOAT_ARRAY: + return NewNormalNillableFloatNillableArray(immutable.None[[]immutable.Option[float64]]()), nil + case FieldKind_NILLABLE_STRING_ARRAY: + return NewNormalNillableStringNillableArray(immutable.None[[]immutable.Option[string]]()), nil default: return nil, NewCanNotMakeNormalNilFromFieldKind(kind) } diff --git a/client/normal_value_test.go b/client/normal_value_test.go index 75e858b056..33cd20c46e 100644 --- a/client/normal_value_test.go +++ b/client/normal_value_test.go @@ -1622,3 +1622,28 @@ func TestNormalValue_ToArrayOfNormalValues(t *testing.T) { }) } } + +// This test documents a bug where array values +// were not returning the correct value for IsNillable +// and were also not convertible to a normal nil kind. 
+func TestArrayValue_IsNillable(t *testing.T) { + fieldKinds := []FieldKind{ + FieldKind_BOOL_ARRAY, + FieldKind_INT_ARRAY, + FieldKind_FLOAT_ARRAY, + FieldKind_STRING_ARRAY, + FieldKind_NILLABLE_BOOL_ARRAY, + FieldKind_NILLABLE_INT_ARRAY, + FieldKind_NILLABLE_FLOAT_ARRAY, + FieldKind_NILLABLE_STRING_ARRAY, + } + + for _, kind := range fieldKinds { + assert.True(t, kind.IsNillable()) + + v, err := NewNormalNil(kind) + require.NoError(t, err) + + assert.True(t, v.IsNil()) + } +} diff --git a/client/schema_field_description.go b/client/schema_field_description.go index f317ace116..87ee843ec8 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -147,10 +147,7 @@ func (k ScalarArrayKind) Underlying() string { } func (k ScalarArrayKind) IsNillable() bool { - return k == FieldKind_NILLABLE_BOOL_ARRAY || - k == FieldKind_NILLABLE_INT_ARRAY || - k == FieldKind_NILLABLE_FLOAT_ARRAY || - k == FieldKind_NILLABLE_STRING_ARRAY + return true } func (k ScalarArrayKind) IsObject() bool { From e65b164148867cfb002dec5f3c290139bc09da65 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 22 Apr 2024 10:00:59 -0700 Subject: [PATCH 35/49] refactor: Merge collection UpdateWith and DeleteWith (#2531) ## Relevant issue(s) Resolves #2457 ## Description This PR merges the `DeleteWith` and `UpdateWith` functions into a singular `DeleteWithFilter` and `UpdateWithFilter`. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? `make test` Specify the platform(s) on which this was tested: - MacOS --- cli/collection_delete.go | 37 +--- cli/collection_update.go | 40 +--- client/collection.go | 75 ------- client/document.go | 3 - db/collection_delete.go | 111 ---------- db/collection_update.go | 155 -------------- docs/cli/defradb_client_collection_delete.md | 10 +- docs/cli/defradb_client_collection_update.md | 10 +- http/client_collection.go | 134 ++---------- http/handler_collection.go | 108 ++-------- planner/delete.go | 2 +- planner/update.go | 12 +- tests/clients/cli/wrapper_collection.go | 164 ++------------ .../update/simple/with_doc_id_test.go | 151 ------------- .../update/simple/with_doc_ids_test.go | 200 ------------------ tests/integration/utils2.go | 2 +- 16 files changed, 84 insertions(+), 1130 deletions(-) delete mode 100644 tests/integration/collection/update/simple/with_doc_id_test.go delete mode 100644 tests/integration/collection/update/simple/with_doc_ids_test.go diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 5bbe32a964..a9776d1985 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -17,18 +17,18 @@ import ( ) func MakeCollectionDeleteCommand() *cobra.Command { - var argDocIDs []string + var argDocID string var filter string var cmd = &cobra.Command{ Use: "delete [-i --identity] [--filter --docID ]", Short: "Delete documents by docID or filter.", Long: `Delete documents by docID or filter and lists the number of documents deleted. 
-Example: delete by docID(s): - defradb client collection delete --name User --docID bae-123,bae-456 +Example: delete by docID: + defradb client collection delete --name User --docID bae-123 -Example: delete by docID(s) with identity: - defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123,bae-456 +Example: delete by docID with identity: + defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123 Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' @@ -40,30 +40,13 @@ Example: delete by filter: } switch { - case len(argDocIDs) == 1: - docID, err := client.NewDocIDFromString(argDocIDs[0]) + case argDocID != "": + docID, err := client.NewDocIDFromString(argDocID) if err != nil { return err } - res, err := col.DeleteWithDocID(cmd.Context(), docID) - if err != nil { - return err - } - return writeJSON(cmd, res) - case len(argDocIDs) > 1: - docIDs := make([]client.DocID, len(argDocIDs)) - for i, v := range argDocIDs { - docID, err := client.NewDocIDFromString(v) - if err != nil { - return err - } - docIDs[i] = docID - } - res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs) - if err != nil { - return err - } - return writeJSON(cmd, res) + _, err = col.Delete(cmd.Context(), docID) + return err case filter != "": res, err := col.DeleteWithFilter(cmd.Context(), filter) if err != nil { @@ -75,7 +58,7 @@ Example: delete by filter: } }, } - cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") + cmd.Flags().StringVar(&argDocID, "docID", "", "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") return cmd } diff --git a/cli/collection_update.go b/cli/collection_update.go index 2777c0ed98..3e676edce9 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -17,7 +17,7 @@ import ( ) func MakeCollectionUpdateCommand() *cobra.Command { - var argDocIDs []string + var argDocID string var filter string var updater string var cmd = &cobra.Command{ @@ -32,13 +32,13 @@ Example: update by filter: defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by docIDs: +Example: update by docID: defradb client collection update --name User \ - --docID bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123 --updater '{ "verified": true }' -Example: update private docIDs, with identity: +Example: update private docID, with identity: defradb client collection update -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User \ - --docID bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -48,38 +48,14 @@ Example: update private docIDs, with identity: } switch { - case len(argDocIDs) == 1 && updater != "": - docID, err := client.NewDocIDFromString(argDocIDs[0]) - if err != nil { - return err - } - res, err := col.UpdateWithDocID(cmd.Context(), docID, updater) - if err != nil { - return err - } - return writeJSON(cmd, res) - case len(argDocIDs) > 1 && updater != "": - docIDs := make([]client.DocID, len(argDocIDs)) - for i, v := range argDocIDs { - docID, err := client.NewDocIDFromString(v) - if err != nil { - return err - } - docIDs[i] = docID - } - res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater) - if err != nil { - return err - } - return writeJSON(cmd, res) 
case filter != "" && updater != "": res, err := col.UpdateWithFilter(cmd.Context(), filter, updater) if err != nil { return err } return writeJSON(cmd, res) - case len(argDocIDs) == 1 && len(args) == 1: - docID, err := client.NewDocIDFromString(argDocIDs[0]) + case argDocID != "" && len(args) == 1: + docID, err := client.NewDocIDFromString(argDocID) if err != nil { return err } @@ -96,7 +72,7 @@ Example: update private docIDs, with identity: } }, } - cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") + cmd.Flags().StringVar(&argDocID, "docID", "", "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") return cmd diff --git a/client/collection.go b/client/collection.go index 05cb821889..38c309a0e8 100644 --- a/client/collection.go +++ b/client/collection.go @@ -78,21 +78,6 @@ type Collection interface { // Will return true if a matching document exists, otherwise will return false. Exists(ctx context.Context, docID DocID) (bool, error) - // UpdateWith updates a target document using the given updater type. - // - // Target can be a Filter statement, a single DocID, a single document, - // an array of DocIDs, or an array of documents. - // It is recommended to use the respective typed versions of Update - // (e.g. UpdateWithFilter or UpdateWithDocID) over this function if you can. - // - // Returns an ErrInvalidUpdateTarget error if the target type is not supported. - // Returns an ErrInvalidUpdater error if the updater type is not supported. - UpdateWith( - ctx context.Context, - target any, - updater string, - ) (*UpdateResult, error) - // UpdateWithFilter updates using a filter to target documents for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch @@ -103,44 +88,6 @@ type Collection interface { updater string, ) (*UpdateResult, error) - // UpdateWithDocID updates using a DocID to target a single document for update. - // - // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch - // else an ErrInvalidUpdater will be returned. - // - // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. - UpdateWithDocID( - ctx context.Context, - docID DocID, - updater string, - ) (*UpdateResult, error) - - // UpdateWithDocIDs updates documents matching the given DocIDs. - // - // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch - // else an ErrInvalidUpdater will be returned. - // - // Returns an ErrDocumentNotFound if a document is not found for any given DocID. - UpdateWithDocIDs( - ctx context.Context, - docIDs []DocID, - updater string, - ) (*UpdateResult, error) - - // DeleteWith deletes a target document. - // - // Target can be a Filter statement, a single DocID, a single document, an array of DocIDs, - // or an array of documents. It is recommended to use the respective typed versions of Delete - // (e.g. DeleteWithFilter or DeleteWithDocID) over this function if you can. - // This operation will soft-delete documents related to the given DocID and update the composite block - // with a status of `Deleted`. - // - // Returns an ErrInvalidDeleteTarget if the target type is not supported. - DeleteWith( - ctx context.Context, - target any, - ) (*DeleteResult, error) - // DeleteWithFilter deletes documents matching the given filter. 
// // This operation will soft-delete documents related to the given filter and update the composite block @@ -150,28 +97,6 @@ type Collection interface { filter any, ) (*DeleteResult, error) - // DeleteWithDocID deletes using a DocID to target a single document for delete. - // - // This operation will soft-delete documents related to the given DocID and update the composite block - // with a status of `Deleted`. - // - // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. - DeleteWithDocID( - ctx context.Context, - docID DocID, - ) (*DeleteResult, error) - - // DeleteWithDocIDs deletes documents matching the given DocIDs. - // - // This operation will soft-delete documents related to the given DocIDs and update the composite block - // with a status of `Deleted`. - // - // Returns an ErrDocumentNotFound if a document is not found for any given DocID. - DeleteWithDocIDs( - ctx context.Context, - docIDs []DocID, - ) (*DeleteResult, error) - // Get returns the document with the given DocID. // // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. diff --git a/client/document.go b/client/document.go index 2325328b13..4534e9fa33 100644 --- a/client/document.go +++ b/client/document.go @@ -630,9 +630,6 @@ func (doc *Document) setCBOR(t CType, field string, val NormalValue) error { func (doc *Document) setAndParseObjectType(value map[string]any) error { for k, v := range value { - if v == nil { - continue - } err := doc.Set(k, v) if err != nil { return err diff --git a/db/collection_delete.go b/db/collection_delete.go index e8bf13b221..62ebd7f167 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -15,74 +15,11 @@ import ( "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/merkle/clock" ) -// DeleteWith deletes a target document. -// -// Target can be a Filter statement, a single DocID, a single document, -// an array of DocIDs, or an array of documents. -// -// If you want more type safety, use the respective typed versions of Delete. -// Eg: DeleteWithFilter or DeleteWithDocID -func (c *collection) DeleteWith( - ctx context.Context, - target any, -) (*client.DeleteResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.DeleteWithFilter(ctx, t) - case client.DocID: - return c.DeleteWithDocID(ctx, t) - case []client.DocID: - return c.DeleteWithDocIDs(ctx, t) - default: - return nil, client.ErrInvalidDeleteTarget - } -} - -// DeleteWithDocID deletes using a DocID to target a single document for delete. -func (c *collection) DeleteWithDocID( - ctx context.Context, - docID client.DocID, -) (*client.DeleteResult, error) { - ctx, txn, err := ensureContextTxn(ctx, c.db, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - dsKey := c.getPrimaryKeyFromDocID(docID) - res, err := c.deleteWithKey(ctx, dsKey) - if err != nil { - return nil, err - } - - return res, txn.Commit(ctx) -} - -// DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. 
-func (c *collection) DeleteWithDocIDs( - ctx context.Context, - docIDs []client.DocID, -) (*client.DeleteResult, error) { - ctx, txn, err := ensureContextTxn(ctx, c.db, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - res, err := c.deleteWithIDs(ctx, docIDs, client.Deleted) - if err != nil { - return nil, err - } - - return res, txn.Commit(ctx) -} - // DeleteWithFilter deletes using a filter to target documents for delete. func (c *collection) DeleteWithFilter( ctx context.Context, @@ -102,54 +39,6 @@ func (c *collection) DeleteWithFilter( return res, txn.Commit(ctx) } -func (c *collection) deleteWithKey( - ctx context.Context, - key core.PrimaryDataStoreKey, -) (*client.DeleteResult, error) { - // Check the key we have been given to delete with actually has a corresponding - // document (i.e. document actually exists in the collection). - err := c.applyDelete(ctx, key) - if err != nil { - return nil, err - } - - // Upon successfull deletion, record a summary. - results := &client.DeleteResult{ - Count: 1, - DocIDs: []string{key.DocID}, - } - - return results, nil -} - -func (c *collection) deleteWithIDs( - ctx context.Context, - docIDs []client.DocID, - _ client.DocumentStatus, -) (*client.DeleteResult, error) { - results := &client.DeleteResult{ - DocIDs: make([]string, 0), - } - - for _, docID := range docIDs { - primaryKey := c.getPrimaryKeyFromDocID(docID) - - // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, primaryKey) - if err != nil { - return nil, err - } - - // Add this deleted docID to our list. - results.DocIDs = append(results.DocIDs, docID.String()) - } - - // Upon successfull deletion, record a summary of how many we deleted. - results.Count = int64(len(results.DocIDs)) - - return results, nil -} - func (c *collection) deleteWithFilter( ctx context.Context, filter any, diff --git a/db/collection_update.go b/db/collection_update.go index 9110976593..e59469715a 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -24,28 +24,6 @@ import ( "github.com/sourcenetwork/defradb/planner" ) -// UpdateWith updates a target document using the given updater type. Target -// can be a Filter statement, a single DocID, a single document, -// an array of DocIDs, or an array of documents. -// If you want more type safety, use the respective typed versions of Update. -// Eg: UpdateWithFilter or UpdateWithDocID -func (c *collection) UpdateWith( - ctx context.Context, - target any, - updater string, -) (*client.UpdateResult, error) { - switch t := target.(type) { - case string, map[string]any, *request.Filter: - return c.UpdateWithFilter(ctx, t, updater) - case client.DocID: - return c.UpdateWithDocID(ctx, t, updater) - case []client.DocID: - return c.UpdateWithDocIDs(ctx, t, updater) - default: - return nil, client.ErrInvalidUpdateTarget - } -} - // UpdateWithFilter updates using a filter to target documents for update. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. @@ -67,139 +45,6 @@ func (c *collection) UpdateWithFilter( return res, txn.Commit(ctx) } -// UpdateWithDocID updates using a DocID to target a single document for update. -// An updater value is provided, which could be a string Patch, string Merge Patch -// or a parsed Patch, or parsed Merge Patch. 
-func (c *collection) UpdateWithDocID( - ctx context.Context, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - ctx, txn, err := ensureContextTxn(ctx, c.db, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - res, err := c.updateWithDocID(ctx, docID, updater) - if err != nil { - return nil, err - } - - return res, txn.Commit(ctx) -} - -// UpdateWithDocIDs is the same as UpdateWithDocID but accepts multiple DocIDs as a slice. -// An updater value is provided, which could be a string Patch, string Merge Patch -// or a parsed Patch, or parsed Merge Patch. -func (c *collection) UpdateWithDocIDs( - ctx context.Context, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - ctx, txn, err := ensureContextTxn(ctx, c.db, false) - if err != nil { - return nil, err - } - defer txn.Discard(ctx) - - res, err := c.updateWithIDs(ctx, docIDs, updater) - if err != nil { - return nil, err - } - - return res, txn.Commit(ctx) -} - -func (c *collection) updateWithDocID( - ctx context.Context, - docID client.DocID, - updater string, -) (*client.UpdateResult, error) { - parsedUpdater, err := fastjson.Parse(updater) - if err != nil { - return nil, err - } - - isPatch := false - if parsedUpdater.Type() == fastjson.TypeArray { - isPatch = true - } else if parsedUpdater.Type() != fastjson.TypeObject { - return nil, client.ErrInvalidUpdater - } - - doc, err := c.Get(ctx, docID, false) - if err != nil { - return nil, err - } - - if isPatch { - // todo - } else { - err = doc.SetWithJSON([]byte(updater)) - } - if err != nil { - return nil, err - } - - err = c.update(ctx, doc) - if err != nil { - return nil, err - } - - results := &client.UpdateResult{ - Count: 1, - DocIDs: []string{docID.String()}, - } - return results, nil -} - -func (c *collection) updateWithIDs( - ctx context.Context, - docIDs []client.DocID, - updater string, -) (*client.UpdateResult, error) { - parsedUpdater, err := fastjson.Parse(updater) - if err != nil { - return nil, err - } - - isPatch := false - if parsedUpdater.Type() == fastjson.TypeArray { - isPatch = true - } else if parsedUpdater.Type() != fastjson.TypeObject { - return nil, client.ErrInvalidUpdater - } - - results := &client.UpdateResult{ - DocIDs: make([]string, len(docIDs)), - } - for i, docIDs := range docIDs { - doc, err := c.Get(ctx, docIDs, false) - if err != nil { - return nil, err - } - - if isPatch { - // todo - } else { - err = doc.SetWithJSON([]byte(updater)) - } - if err != nil { - return nil, err - } - - err = c.update(ctx, doc) - if err != nil { - return nil, err - } - - results.DocIDs[i] = docIDs.String() - results.Count++ - } - return results, nil -} - func (c *collection) updateWithFilter( ctx context.Context, filter any, diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index f3c678e857..110654ff5d 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -6,11 +6,11 @@ Delete documents by docID or filter. Delete documents by docID or filter and lists the number of documents deleted. 
-Example: delete by docID(s): - defradb client collection delete --name User --docID bae-123,bae-456 +Example: delete by docID: + defradb client collection delete --name User --docID bae-123 -Example: delete by docID(s) with identity: - defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123,bae-456 +Example: delete by docID with identity: + defradb client collection delete -i cosmos1f2djr7dl9vhrk3twt3xwqp09nhtzec9mdkf70j --name User --docID bae-123 Example: delete by filter: defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' @@ -23,7 +23,7 @@ defradb client collection delete [-i --identity] [--filter --docID --docID Date: Mon, 22 Apr 2024 15:58:42 -0400 Subject: [PATCH 36/49] docs: Add data definition document (#2544) ## Relevant issue(s) Resolves #2538 ## Description Adds a data definition document, documenting how data is defined. It is targeted at us and anyone looking to embed defradb in their application. --- client/README.md | 3 ++ client/data_definition.md | 65 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 client/README.md create mode 100644 client/data_definition.md diff --git a/client/README.md b/client/README.md new file mode 100644 index 0000000000..ec2cf7efcd --- /dev/null +++ b/client/README.md @@ -0,0 +1,3 @@ +The `client` package is the primary access point for interacting with an embedded DefraDB instance. + +[Data definition overview](./data_definition.md) - How the shapes of documents are defined and grouped. diff --git a/client/data_definition.md b/client/data_definition.md new file mode 100644 index 0000000000..c0a197158e --- /dev/null +++ b/client/data_definition.md @@ -0,0 +1,65 @@ +# Data Definition in a DefraDB instance + +Data held in a DefraDB instance is organized into [collections](#collections) of documents. [Collections](#collections) are [local](#local-definitions) groupings of documents that share the same [globally](#global-definitions) defined shape declared by a [schema](#schemas). + +## Local definitions + +Local definitions are specific to the node you are directly working with; they are not shared with, or assumed to be the same on, other nodes in the network. + +Splitting local elements out from the global ones allows some local customization to the way data is organized within any given node. It also minimizes the amount of 'stuff' that must be kept consistent across the decentralized network in order to have a well-behaved database. + +Local data definitions are always defined on the [collection](#collections). + +Examples include indexes, field IDs, and [lens transforms](https://docs.source.network/defradb/guides/schema-migration). + +## Global definitions + +Global definitions are consistent across all nodes in the decentralized network. This is enforced by the use of things like CIDs for schema versions. If a global definition were to differ across nodes, the variations would be treated as completely different definitions. + +Global data definitions are always defined on the [schema](#schemas). + +Examples include field names, field kinds and [CRDTs](https://docs.source.network/defradb/guides/merkle-crdt). + +## Collections + +Collections represent [local](#local-definitions), independently queryable datasets sharing the same shape. + +Collections are defined by the `CollectionDescription` struct. This can be mutated via the `PatchCollection` function.
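+
+For illustration, a minimal sketch of such a mutation, assuming a JSON Patch body and a `PatchCollection(ctx, patch)` method on the `client.DB` interface (the patch path and call shape here are assumptions, not a reference):
+
+```go
+// Sketch: rename the collection with ID 1 via a JSON Patch.
+// The patch path and the PatchCollection signature are assumed
+// for illustration and may differ from the real API.
+func renameCollection(ctx context.Context, db client.DB) error {
+	patch := `[{ "op": "replace", "path": "/1/Name", "value": "Authors" }]`
+	return db.PatchCollection(ctx, patch)
+}
+```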
+ +A collection will always have a [global](#global-definitions) shape defined by a single [schema](#schemas) version. + +### Versions + +`CollectionDescription` instances may be active or inactive. Inactive `CollectionDescription`s will not have a name, and cannot be queried. + +When a new [schema](#schemas) version is created and has a collection defined for it, a new `CollectionDescription` instance will be created and linked to the new schema version. The new `CollectionDescription` instance will share the same root ID as the previous, and may be active or inactive depending on the arguments specified by the user defining the new schema. + +[Lens migrations](https://docs.source.network/defradb/guides/schema-migration) between collection versions may be defined. These are, like everything on the collection, [local](#local-definitions). They allow transformation of data between versions, allowing documents synced across the node network at one schema version to be presented to users at **query time** at another version. + +### Collection fields + +The set of fields on a `CollectionDescription` defines [local](#local-definitions) aspects of the [globally](#global-definitions) defined fields on the collection's [schema](#schemas). The set may also include local-only fields that are not defined on the schema, and will not be synced to other nodes - currently these are limited to the secondary side of a relationship defined between two collections. + +### Views + +Collections are not limited to representing writeable data. Collections can also represent views of written data. + +Views are collections with a `QuerySource` source in the `Sources` set. On query they will fetch data from the query defined on `QuerySource`, and then (optionally) apply a [Lens](https://github.com/lens-vm/lens) transform before yielding the results to the user. The query may point to another view, allowing views of views of views. + +Views may be defined using the `AddView` function. + +### Embedded types + +Some fields on a collection may represent a complex object; typically these will be a relationship to another collection, however they may instead represent an embedded type. + +Embedded types cannot exist or be queried outside of the context of their host collection, and thus are defined only as a [global](#global-definitions) shape represented by a [schema](#schemas). + +Related objects defined in a [view](#views) are embedded objects. + +## Schemas + +Schemas represent [global](#global-definitions) data shapes. They cannot host document data themselves or be queried; that is done via [collections](#collections). + +Schemas are defined by the `SchemaDescription` struct. They are immutable; however, new versions can be created using the `PatchSchema` function. + +Multiple [collections](#collections) may reference the same schema. From c42e7ee6fe85761214d88a8bbbea81524cc3f3f7 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 23 Apr 2024 12:33:48 -0700 Subject: [PATCH 37/49] feat: Update corelog to 0.0.7 (#2547) ## Relevant issue(s) Resolves #2546 ## Description This PR updates `corelog` to version 0.0.7. It also adds the `log.nocolor` config. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? `make test` Specify the platform(s) on which this was tested: - MacOS --- cli/config.go | 1 + cli/config_test.go | 1 + cli/root.go | 6 ++++++ docs/config.md | 4 ++++ go.mod | 7 ++++--- go.sum | 13 ++++++++----- 6 files changed, 24 insertions(+), 8 deletions(-) diff --git a/cli/config.go b/cli/config.go index 54d7529121..fd275a2d01 100644 --- a/cli/config.go +++ b/cli/config.go @@ -46,6 +46,7 @@ var configFlags = map[string]string{ "log.stacktrace": "log-stacktrace", "log.source": "log-source", "log.overrides": "log-overrides", + "log.nocolor": "log-no-color", "api.address": "url", "datastore.maxtxnretries": "max-txn-retries", "datastore.store": "store", diff --git a/cli/config_test.go b/cli/config_test.go index 492774398c..39a17d60fd 100644 --- a/cli/config_test.go +++ b/cli/config_test.go @@ -58,4 +58,5 @@ func TestLoadConfigNotExist(t *testing.T) { assert.Equal(t, false, cfg.GetBool("log.stacktrace")) assert.Equal(t, false, cfg.GetBool("log.source")) assert.Equal(t, "", cfg.GetString("log.overrides")) + assert.Equal(t, false, cfg.GetBool("log.nocolor")) } diff --git a/cli/root.go b/cli/root.go index 6ba7af1f1c..8fc8baf628 100644 --- a/cli/root.go +++ b/cli/root.go @@ -73,6 +73,12 @@ Start a DefraDB node, interact with a local or remote node, and much more. "Logger config overrides. Format ,=,...;,...", ) + cmd.PersistentFlags().Bool( + "log-no-color", + false, + "Disable colored log output", + ) + cmd.PersistentFlags().String( "url", "127.0.0.1:9181", diff --git a/docs/config.md b/docs/config.md index 80c6d437ec..da46700bb7 100644 --- a/docs/config.md +++ b/docs/config.md @@ -88,3 +88,7 @@ Include source location in logs. Defaults to `false`. ## `log.overrides` Logger config overrides. Format `,=,...;,...`. + +## `log.nocolor` + +Disable colored log output. Defaults to `false`. 
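+
+For example, starting a node with `defradb start --log-no-color`, or setting `log.nocolor` to `true` in the config file, should disable colored output (illustrative usage of the flag and config key added in this PR).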
diff --git a/go.mod b/go.mod index 9ef7ab83e4..d24fa79331 100644 --- a/go.mod +++ b/go.mod @@ -31,7 +31,7 @@ require ( github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 - github.com/sourcenetwork/corelog v0.0.6 + github.com/sourcenetwork/corelog v0.0.7 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd github.com/sourcenetwork/immutable v0.3.0 @@ -208,6 +208,7 @@ require ( github.com/libp2p/go-reuseport v0.4.0 // indirect github.com/libp2p/go-yamux/v4 v4.0.1 // indirect github.com/linxGnu/grocksdb v1.8.12 // indirect + github.com/lmittmann/tint v1.0.4 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect @@ -293,8 +294,8 @@ require ( golang.org/x/mod v0.15.0 // indirect golang.org/x/net v0.21.0 // indirect golang.org/x/sync v0.6.0 // indirect - golang.org/x/sys v0.18.0 // indirect - golang.org/x/term v0.17.0 // indirect + golang.org/x/sys v0.19.0 // indirect + golang.org/x/term v0.19.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.18.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect diff --git a/go.sum b/go.sum index f5fd826cbd..d11528448c 100644 --- a/go.sum +++ b/go.sum @@ -738,6 +738,8 @@ github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-b github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= github.com/linxGnu/grocksdb v1.8.12 h1:1/pCztQUOa3BX/1gR3jSZDoaKFpeHFvQ1XrqZpSvZVo= github.com/linxGnu/grocksdb v1.8.12/go.mod h1:xZCIb5Muw+nhbDK4Y5UJuOrin5MceOuiXkVUR7vp4WY= +github.com/lmittmann/tint v1.0.4 h1:LeYihpJ9hyGvE0w+K2okPTGUdVLfng1+nDNVR4vWISc= +github.com/lmittmann/tint v1.0.4/go.mod h1:HIS3gSy7qNwGCj+5oRjAutErFBl4BzdQP6cJZ0NfMwE= github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= @@ -1051,8 +1053,8 @@ github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIK github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 h1:TpQDDPfucDgCNH0NVqVUk6SSq6T6G8p9HIocmwZh9Tg= github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276/go.mod h1:lxiZTDBw0vheFMqSwX2OvB6RTDI1+/UtVCSU4rpThFM= -github.com/sourcenetwork/corelog v0.0.6 h1:3q3Kh1G0C4iHimkPrOpAZOKIKESIv4zZ51vKuY8pANA= -github.com/sourcenetwork/corelog v0.0.6/go.mod h1:mXsBA4ujUt0lAkDFoHoXuaIQjhdeXi+RfDNV7ZTiy5E= +github.com/sourcenetwork/corelog v0.0.7 h1:vztssVAUDcsYN5VUOW3PKYhLprHfzoc8UbKewQuD1qw= +github.com/sourcenetwork/corelog v0.0.7/go.mod h1:cMabHgs3kARgYTQeQYSOmaGGP8XMU6sZrHd8LFrL3zA= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13 h1:d/PeGZutd5NcDr6ltAv8ubN5PxsHMp1YUnhHY/QCWB4= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.13/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd h1:lmpW39/8wPJ0khWRhOcj7Bj0HYKbSmQ8rXMJw1cMB8U= @@ -1393,14 +1395,15 @@ golang.org/x/sys v0.6.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= From 8419297486ea826f63958830eaaca08b21c3bebf Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:12:01 -0400 Subject: [PATCH 38/49] bot: Update dependencies (bulk dependabot PRs) 23-04-2023 (#2548) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 📓 Note: Updated `make tidy` to the minimum required patch version because of `corelog`. 
✅ This PR was created by combining the following PRs: #2543 bot: Bump @typescript-eslint/parser from 7.3.1 to 7.7.0 in /playground #2541 bot: Bump swagger-ui-react from 5.15.0 to 5.16.2 in /playground #2540 bot: Bump @types/react from 18.2.74 to 18.2.79 in /playground #2532 bot: Bump golang.org/x/net from 0.21.0 to 0.23.0 #2522 bot: Bump github.com/cosmos/gogoproto from 1.4.11 to 1.4.12 #2521 bot: Bump github.com/gofrs/uuid/v5 from 5.0.0 to 5.1.0 #2520 bot: Bump github.com/ipfs/boxo from 0.18.0 to 0.19.0 #2519 bot: Bump google.golang.org/grpc from 1.62.1 to 1.63.2 ⚠️ The following PRs were resolved manually due to merge conflicts: #2542 bot: Bump @typescript-eslint/eslint-plugin from 7.5.0 to 7.7.0 in /playground #2524 bot: Bump @types/react-dom from 18.2.24 to 18.2.25 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- Makefile | 2 +- go.mod | 18 +- go.sum | 35 +- playground/package-lock.json | 1041 ++++++++++++++++++++++++++++++---- playground/package.json | 10 +- 5 files changed, 955 insertions(+), 151 deletions(-) diff --git a/Makefile b/Makefile index cde535be4b..658b514a4b 100644 --- a/Makefile +++ b/Makefile @@ -202,7 +202,7 @@ verify: .PHONY: tidy tidy: - go mod tidy -go=1.21 + go mod tidy -go=1.21.3 .PHONY: clean clean: diff --git a/go.mod b/go.mod index d24fa79331..0bfa96d5b9 100644 --- a/go.mod +++ b/go.mod @@ -5,16 +5,16 @@ go 1.21.3 require ( github.com/bits-and-blooms/bitset v1.13.0 github.com/bxcodec/faker v2.0.1+incompatible - github.com/cosmos/gogoproto v1.4.11 + github.com/cosmos/gogoproto v1.4.12 github.com/evanphx/json-patch/v5 v5.9.0 github.com/fxamacker/cbor/v2 v2.6.0 github.com/getkin/kin-openapi v0.124.0 github.com/go-chi/chi/v5 v5.0.12 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 - github.com/gofrs/uuid/v5 v5.0.0 + github.com/gofrs/uuid/v5 v5.1.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.18.0 + github.com/ipfs/boxo v0.19.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -48,7 +48,7 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.25.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 - google.golang.org/grpc v1.62.1 + google.golang.org/grpc v1.63.2 google.golang.org/protobuf v1.33.0 ) @@ -290,18 +290,18 @@ require ( go.uber.org/fx v1.20.1 // indirect go.uber.org/mock v0.4.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.19.0 // indirect + golang.org/x/crypto v0.21.0 // indirect golang.org/x/mod v0.15.0 // indirect - golang.org/x/net v0.21.0 // indirect + golang.org/x/net v0.23.0 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.19.0 // indirect golang.org/x/term v0.19.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.18.0 // indirect gonum.org/v1/gonum v0.14.0 // indirect - google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c // indirect + google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de // indirect + google.golang.org/genproto/googleapis/rpc 
v0.0.0-20240227224415-6ceb2ff114de // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/v3 v3.5.1 // indirect diff --git a/go.sum b/go.sum index d11528448c..c7ac5763e2 100644 --- a/go.sum +++ b/go.sum @@ -199,8 +199,8 @@ github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4x github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ4GUkT+tbFI= github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU= -github.com/cosmos/gogoproto v1.4.11 h1:LZcMHrx4FjUgrqQSWeaGC1v/TeuVFqSLa43CC6aWR2g= -github.com/cosmos/gogoproto v1.4.11/go.mod h1:/g39Mh8m17X8Q/GDEs5zYTSNaNnInBSohtaxzQnYq1Y= +github.com/cosmos/gogoproto v1.4.12 h1:vB6Lbe/rtnYGjQuFxkPiPYiCybqFT8QvLipDZP8JpFE= +github.com/cosmos/gogoproto v1.4.12/go.mod h1:LnZob1bXRdUoqMMtwYlcR3wjiElmlC+FkjaZRv1/eLY= github.com/cosmos/gorocksdb v1.2.0 h1:d0l3jJG8M4hBouIZq0mDUHZ+zjOx044J3nGRskwTb4Y= github.com/cosmos/gorocksdb v1.2.0/go.mod h1:aaKvKItm514hKfNJpUJXnnOWeBnk2GL4+Qw9NHizILw= github.com/cosmos/iavl v1.0.1 h1:D+mYbcRO2wptYzOM1Hxl9cpmmHU1ZEt9T2Wv5nZTeUw= @@ -386,8 +386,8 @@ github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5x github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M= -github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= +github.com/gofrs/uuid/v5 v5.1.0 h1:S5rqVKIigghZTCBKPCw0Y+bXkn26K3TB5mvQq2Ix8dk= +github.com/gofrs/uuid/v5 v5.1.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/googleapis v1.4.1-0.20201022092350-68b0159b7869/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= @@ -587,8 +587,8 @@ github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.18.0 h1:MOL9/AgoV3e7jlVMInicaSdbgralfqSsbkc31dZ9tmw= -github.com/ipfs/boxo v0.18.0/go.mod h1:pIZgTWdm3k3pLF9Uq6MB8JEcW07UDwNJjlXW1HELW80= +github.com/ipfs/boxo v0.19.0 h1:UbX9FBJQF19ACLqRZOgdEla6jR/sC4H1O+iGE0NToXA= +github.com/ipfs/boxo v0.19.0/go.mod h1:V5gJzbIMwKEXrg3IdvAxIdF7UPgU4RsXmNGS8MQ/0D4= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= @@ -1247,8 +1247,9 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= @@ -1310,8 +1311,8 @@ golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1476,12 +1477,12 @@ google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y= -google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s= -google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 h1:x9PwdEgd11LgK+orcck69WVRo7DezSO4VUMPI4xpc8A= -google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014/go.mod h1:rbHMSEDyoYX62nRVLOCc4Qt1HbsdytAYoVwgjiOhF3I= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c h1:NUsgEN92SQQqzfA+YtqYNqYmB3DMMYLlIwUZAQFVFbo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240221002015-b0ce06bbee7c/go.mod h1:H4O17MA/PE9BsGx3w+a+W2VOLLD1Qf7oJneAoU6WktY= +google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= +google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de h1:jFNzHPIeuzhdRwVhbZdiym9q0ory/xY3sA+v2wPg8I0= +google.golang.org/genproto/googleapis/api v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:5iCWqnniDlqZHrd3neWVTOwvh/v6s3232omMecelax8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de h1:cZGRis4/ot9uVm639a+rHCUaG0JJHEsdyzSQTMX+suY= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:H4O17MA/PE9BsGx3w+a+W2VOLLD1Qf7oJneAoU6WktY= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -1502,8 +1503,8 @@ google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= -google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= +google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/playground/package-lock.json b/playground/package-lock.json index e0fad98390..1df021da7b 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,14 +12,14 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.15.0" + "swagger-ui-react": "^5.16.2" }, "devDependencies": { - "@types/react": "^18.2.74", - "@types/react-dom": "^18.2.24", + "@types/react": "^18.2.79", + "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.5.0", - "@typescript-eslint/parser": "^7.3.1", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2383,9 +2383,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.74", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.74.tgz", - "integrity": "sha512-9AEqNZZyBx8OdZpxzQlaFEVCSFUM2YXJH46yPOiOpm078k6ZLOCcuAzGum/zK8YBwY+dbahVNbHrbgrAwIRlqw==", + "version": "18.2.79", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.79.tgz", + "integrity": "sha512-RwGAGXPl9kSXwdNTafkOEuFrTBD5SA2B3iEB96xi8+xu5ddUa/cpvyVCSNn+asgLCTHkb5ZxN8gbuibYJi4s1w==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2393,9 +2393,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.24", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.24.tgz", - "integrity": "sha512-cN6upcKd8zkGy4HU9F1+/s98Hrp6D4MOcippK4PoE8OZRngohHZpbJn1GsaDLz87MqvHNoT13nHvNqM9ocRHZg==", + "version": "18.2.25", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.25.tgz", + "integrity": "sha512-o/V48vf4MQh7juIKZU2QGDfli6p1+OOi5oXx36Hffpc9adsHeXjVp8rHuPkjd8VT8sOJ2Zp05HR7CdpGTIUFUA==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -2435,22 +2435,22 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": 
"7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.5.0.tgz", - "integrity": "sha512-HpqNTH8Du34nLxbKgVMGljZMG0rJd2O9ecvr2QLYp+7512ty1j42KnsFwspPXg1Vh8an9YImf6CokUBltisZFQ==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz", + "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==", "dev": true, "dependencies": { - "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "7.5.0", - "@typescript-eslint/type-utils": "7.5.0", - "@typescript-eslint/utils": "7.5.0", - "@typescript-eslint/visitor-keys": "7.5.0", + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/type-utils": "7.7.0", + "@typescript-eslint/utils": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "graphemer": "^1.4.0", - "ignore": "^5.2.4", + "ignore": "^5.3.1", "natural-compare": "^1.4.0", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2470,15 +2470,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.3.1.tgz", - "integrity": "sha512-Rq49+pq7viTRCH48XAbTA+wdLRrB/3sRq4Lpk0oGDm0VmnjBrAOVXH/Laalmwsv2VpekiEfVFwJYVk6/e8uvQw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz", + "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.3.1", - "@typescript-eslint/types": "7.3.1", - "@typescript-eslint/typescript-estree": "7.3.1", - "@typescript-eslint/visitor-keys": "7.3.1", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4" }, "engines": { @@ -2498,13 +2498,13 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.3.1.tgz", - "integrity": "sha512-fVS6fPxldsKY2nFvyT7IP78UO1/I2huG+AYu5AMjCT9wtl6JFiDnsv4uad4jQ0GTFzcUV5HShVeN96/17bTBag==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", + "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.3.1", - "@typescript-eslint/visitor-keys": "7.3.1" + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2515,9 +2515,9 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.3.1.tgz", - "integrity": "sha512-2tUf3uWggBDl4S4183nivWQ2HqceOZh1U4hhu4p1tPiIJoRRXrab7Y+Y0p+dozYwZVvLPRI6r5wKe9kToF9FIw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", + "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", "dev": 
true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2528,19 +2528,19 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.3.1.tgz", - "integrity": "sha512-tLpuqM46LVkduWP7JO7yVoWshpJuJzxDOPYIVWUUZbW+4dBpgGeUdl/fQkhuV0A8eGnphYw3pp8d2EnvPOfxmQ==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", + "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.3.1", - "@typescript-eslint/visitor-keys": "7.3.1", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2556,13 +2556,13 @@ } }, "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.3.1.tgz", - "integrity": "sha512-9RMXwQF8knsZvfv9tdi+4D/j7dMG28X/wMJ8Jj6eOHyHWwDW4ngQJcqEczSsqIKKjFiLFr40Mnr7a5ulDD3vmw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", + "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.3.1", - "eslint-visitor-keys": "^3.4.1" + "@typescript-eslint/types": "7.7.0", + "eslint-visitor-keys": "^3.4.3" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2572,14 +2572,29 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@typescript-eslint/parser/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.5.0.tgz", - "integrity": "sha512-Z1r7uJY0MDeUlql9XJ6kRVgk/sP11sr3HKXn268HZyqL7i4cEfrdFuSSY/0tUqT37l5zT0tJOsuDP16kio85iA==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", + "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.5.0", - "@typescript-eslint/visitor-keys": "7.5.0" + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2590,15 +2605,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.5.0.tgz", - "integrity": 
"sha512-A021Rj33+G8mx2Dqh0nMO9GyjjIBK3MqgVgZ2qlKf6CJy51wY/lkkFqq3TqqnH34XyAHUkq27IjlUkWlQRpLHw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz", + "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.5.0", - "@typescript-eslint/utils": "7.5.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/utils": "7.7.0", "debug": "^4.3.4", - "ts-api-utils": "^1.0.1" + "ts-api-utils": "^1.3.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2617,9 +2632,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.5.0.tgz", - "integrity": "sha512-tv5B4IHeAdhR7uS4+bf8Ov3k793VEVHd45viRRkehIUZxm0WF82VPiLgHzA/Xl4TGPg1ZD49vfxBKFPecD5/mg==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", + "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2630,19 +2645,19 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.5.0.tgz", - "integrity": "sha512-YklQQfe0Rv2PZEueLTUffiQGKQneiIEKKnfIqPIOxgM9lKSZFCjT5Ad4VqRKj/U4+kQE3fa8YQpskViL7WjdPQ==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", + "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.5.0", - "@typescript-eslint/visitor-keys": "7.5.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/visitor-keys": "7.7.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2657,19 +2672,34 @@ } } }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/utils": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.5.0.tgz", - "integrity": "sha512-3vZl9u0R+/FLQcpy2EHyRGNqAS/ofJ3Ji8aebilfJe+fobK8+LbIFmrHciLVDxjDoONmufDcnVSF38KwMEOjzw==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz", + "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@types/json-schema": "^7.0.12", - "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "7.5.0", - "@typescript-eslint/types": "7.5.0", - "@typescript-eslint/typescript-estree": "7.5.0", - 
"semver": "^7.5.4" + "@types/json-schema": "^7.0.15", + "@types/semver": "^7.5.8", + "@typescript-eslint/scope-manager": "7.7.0", + "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.0", + "semver": "^7.6.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2683,13 +2713,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.5.0.tgz", - "integrity": "sha512-mcuHM/QircmA6O7fy6nn2w/3ditQkj+SgtOc8DW3uQ10Yfj42amm2i+6F2K4YAOPNNTmE6iM1ynM6lrSwdendA==", + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", + "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.5.0", - "eslint-visitor-keys": "^3.4.1" + "@typescript-eslint/types": "7.7.0", + "eslint-visitor-keys": "^3.4.3" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2798,6 +2828,21 @@ "node": ">=10" } }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "dependencies": { + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array-union": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", @@ -2807,6 +2852,27 @@ "node": ">=8" } }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -2828,6 +2894,20 @@ "tslib": "^2.3.0" } }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/axios": { "version": "1.6.8", "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", @@ -3108,9 +3188,9 @@ } }, "node_modules/core-js-pure": { - "version": "3.36.1", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.36.1.tgz", - "integrity": "sha512-NXCvHvSVYSrewP0L5OhltzXeWFJLo2AL2TYnj6iLV3Bw8mM62wAQMNgUCRI6EBu6hVVpbCxmOPlxh1Ikw2PfUA==", + "version": "3.37.0", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.37.0.tgz", + 
"integrity": "sha512-d3BrpyFr5eD4KcbRvQ3FTUx/KWmaDesr7+a3+1+P46IUnNoEt+oiLijPINZMEon7w9oGkIINWxrBAU9DEciwFQ==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -3141,6 +3221,54 @@ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", "devOptional": true }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -3211,6 +3339,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -3290,6 +3434,65 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, + "node_modules/es-abstract": { + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has-property-descriptors": 
"^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", + "object-keys": "^1.1.1", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/es-define-property": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", @@ -3309,6 +3512,46 @@ "node": ">= 0.4" } }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/esbuild": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", @@ -3711,6 +3954,14 @@ } } }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dependencies": { + "is-callable": "^1.1.3" + } + }, "node_modules/form-data": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", @@ -3807,6 +4058,31 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/function.prototype.name": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/get-intrinsic": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", @@ -3833,6 +4109,22 @@ "node": ">=6" } }, + "node_modules/get-symbol-description": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "dependencies": { + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -3905,6 +4197,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "dependencies": { + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/globby": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", @@ -3986,6 +4292,14 @@ "graphql": "^15.5.0 || ^16.0.0" } }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4027,10 +4341,24 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/hasown": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", - "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { "function-bind": "^1.1.2" }, @@ -4157,6 +4485,19 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "optional": true }, + "node_modules/internal-slot": { + "version": "1.0.7", + "resolved": 
"https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", @@ -4187,6 +4528,86 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/is-array-buffer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-decimal": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", @@ -4240,6 +4661,17 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, 
"node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -4248,6 +4680,20 @@ "node": ">=0.12.0" } }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", @@ -4276,6 +4722,88 @@ "node": ">=0.10.0" } }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "dependencies": { + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "dependencies": { + "which-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-wsl": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", @@ -4656,9 +5184,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.57.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.57.0.tgz", - "integrity": 
"sha512-Dp+A9JWxRaKuHP35H77I4kCKesDy5HUDEmScia2FyncMTOXASMyg251F5PhFoDA5uqBrDDffiLpbqnrZmNXW+g==", + "version": "3.60.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.60.0.tgz", + "integrity": "sha512-zcGgwoXbzw9NczqbGzAWL/ToDYAxv1V8gL1D67ClbdkIfeeDBbY0GelZtC25ayLvVjr2q2cloHeQV1R0QAWqRQ==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -4735,6 +5263,23 @@ "node": ">= 0.4" } }, + "node_modules/object.assign": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -4952,6 +5497,14 @@ "tslib": "^2.1.0" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/postcss": { "version": "8.4.38", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", @@ -5086,9 +5639,9 @@ } }, "node_modules/qs": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.0.tgz", - "integrity": "sha512-trVZiI6RMOkO476zLGaBIzszOdFPnCCXHPG9kn0yuS1uz6xdVxPfZdB3vUig9pxPFDM9BRAgz/YUIVQ1/vuiUg==", + "version": "6.12.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz", + "integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==", "dependencies": { "side-channel": "^1.0.6" }, @@ -5274,9 +5827,9 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-redux": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.0.tgz", - "integrity": "sha512-6qoDzIO+gbrza8h3hjMA9aq4nwVFCKFtY2iLxCtVT38Swyy2C/dJCGBXHeHLtx6qlg/8qzc2MrhOeduf5K32wQ==", + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.1.tgz", + "integrity": "sha512-5ynfGDzxxsoV73+4czQM56qF43vsmgJsO22rmAvU5tZT2z5Xow/A2uhhxwXuGTxgdReF3zcp7A80gma2onRs1A==", "dependencies": { "@types/use-sync-external-store": "^0.0.3", "use-sync-external-store": "^1.0.0" @@ -5435,6 +5988,23 @@ "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "dependencies": { + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/remarkable": { "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/remarkable/-/remarkable-2.0.1.tgz", @@ -5575,6 +6145,23 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/safe-array-concat": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -5594,6 +6181,22 @@ } ] }, + "node_modules/safe-regex-test": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-regex": "^1.1.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/scheduler": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", @@ -5646,6 +6249,20 @@ "node": ">= 0.4" } }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/set-value": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz", @@ -5811,6 +6428,52 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/string.prototype.trim": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -5862,16 +6525,16 @@ } }, "node_modules/swagger-client": { - "version": "3.26.6", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.26.6.tgz", - "integrity": "sha512-PYYca8BsamZaOjlKr5ombOTeDje1ddiYAKGstpmAU3iU+mBKgqHiw5G2J77SC9+chKU5y0aJzmQX4hNu3p2y5A==", + "version": "3.27.0", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.27.0.tgz", + "integrity": "sha512-DyuHrUzHxysUbhsCgPfsWKH3EULDwjD7jkRl0SHvXAd6gtZS3e3RHUZjDBA4cbrcgDiIuC7Ju/YIOkE+rKCutw==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", - "@swagger-api/apidom-core": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-core": ">=0.99.1 <1.0.0", "@swagger-api/apidom-error": ">=0.99.0 <1.0.0", - "@swagger-api/apidom-json-pointer": ">=0.99.0 <1.0.0", - "@swagger-api/apidom-ns-openapi-3-1": ">=0.99.0 <1.0.0", - "@swagger-api/apidom-reference": ">=0.99.0 <1.0.0", + "@swagger-api/apidom-json-pointer": ">=0.99.1 <1.0.0", + "@swagger-api/apidom-ns-openapi-3-1": ">=0.99.1 <1.0.0", + "@swagger-api/apidom-reference": ">=0.99.1 <1.0.0", "cookie": "~0.6.0", "deepmerge": "~4.3.0", "fast-json-patch": "^3.0.0-1", @@ -5892,9 +6555,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.15.0.tgz", - "integrity": "sha512-ptagY3F/4Mo1kW1i7m8RJuHOH6zNOmU3LBEqMnTeaYQ9cPUTCpGjLTxWG0IIXasOIpx4kMJqwYhQcFOuw6LsZg==", + "version": "5.16.2", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.16.2.tgz", + "integrity": "sha512-trni55effpq3O2nqnBCMjr3mmFpkpBvhAQa9MDyWleRSkAt4z2aETYiU9/yaIJqrzEL92gUnI7f8cVC2ncW51g==", "dependencies": { "@babel/runtime-corejs3": "^7.24.4", "@braintree/sanitize-url": "=7.0.1", @@ -5917,7 +6580,7 @@ "react-immutable-proptypes": "2.2.0", "react-immutable-pure-component": "^2.2.0", "react-inspector": "^6.0.1", - "react-redux": "^9.1.0", + "react-redux": "^9.1.1", "react-syntax-highlighter": "^15.5.0", "redux": "^5.0.1", "redux-immutable": "^4.0.0", @@ -5925,7 +6588,7 @@ "reselect": "^5.1.0", "serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.26.5", + "swagger-client": "^3.27.0", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -5998,9 +6661,14 @@ "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" }, "node_modules/traverse": { - "version": "0.6.8", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.8.tgz", - "integrity": "sha512-aXJDbk6SnumuaZSANd21XAo15ucCDE38H4fkqiGsc3MhCK+wOlZvLP9cB/TvpHT0mOyWgC4Z8EwRlzqYSUzdsA==", + "version": "0.6.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", + "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", + "dependencies": { + "gopd": "^1.0.1", + "typedarray.prototype.slice": "^1.0.3", + "which-typed-array": "^1.1.15" + }, "engines": { "node": ">= 0.4" }, @@ -6040,9 +6708,9 @@ } }, "node_modules/ts-api-utils": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.2.1.tgz", - "integrity": "sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": 
"sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", "dev": true, "engines": { "node": ">=16" @@ -6101,6 +6769,94 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typedarray.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", + "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-errors": "^1.3.0", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-offset": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/types-ramda": { "version": "0.29.10", "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", @@ -6127,6 +6883,20 @@ "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dependencies": 
{ + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", @@ -6307,6 +7077,39 @@ "node": ">= 8" } }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/playground/package.json b/playground/package.json index a0332b211b..e836a910ef 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,14 +14,14 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.15.0" + "swagger-ui-react": "^5.16.2" }, "devDependencies": { - "@types/react": "^18.2.74", - "@types/react-dom": "^18.2.24", + "@types/react": "^18.2.79", + "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.5.0", - "@typescript-eslint/parser": "^7.3.1", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", From ed48a8b4263ec35989a73a8764600f8f151cfff3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 17:32:49 -0400 Subject: [PATCH 39/49] bot: Bump swagger-ui-react from 5.16.2 to 5.17.0 in /playground (#2549) Bumps [swagger-ui-react](https://github.com/swagger-api/swagger-ui) from 5.16.2 to 5.17.0.
Release notes

Sourced from swagger-ui-react's releases.

Swagger UI v5.17.0 Released!

5.17.0 (2024-04-22)

Features

  • config: expose config and make it overridable (#9862) (17d50a6)
  • swagger-ui-react: rewrite into SSR compatible function component (#9855) (351191b), closes #9243
Commits
  • 84f66a6 chore(release): cut the v5.17.0 release
  • 17d50a6 feat(config): expose config and make it overridable (#9862)
  • 351191b feat(swagger-ui-react): rewrite into SSR compatible function component (#9855)
  • fee426b chore(deps): bump nginx from 1.25.4-alpine to 1.25.5-alpine (#9859)
  • c8f892d refactor(config): export config options typecasing mechanism (#9854)
  • d578669 docs: fix typos in code comments (#9841)
  • 8c43f17 chore(deps-dev): bump babel-plugin-module-resolver from 5.0.1 to 5.0.2 (#9852)
  • 9742378 chore(deps-dev): bump cypress from 13.7.3 to 13.8.0 (#9851)
  • 6434c19 chore(deps-dev): bump postcss-preset-env from 9.5.5 to 9.5.6 (#9850)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=swagger-ui-react&package-manager=npm_and_yarn&previous-version=5.16.2&new-version=5.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 23 ++++------------------- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 20 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 1df021da7b..598e495958 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,7 +12,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.16.2" + "swagger-ui-react": "^5.17.0" }, "devDependencies": { "@types/react": "^18.2.79", @@ -5112,21 +5112,6 @@ "node": ">=6" } }, - "node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -6555,9 +6540,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.16.2", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.16.2.tgz", - "integrity": "sha512-trni55effpq3O2nqnBCMjr3mmFpkpBvhAQa9MDyWleRSkAt4z2aETYiU9/yaIJqrzEL92gUnI7f8cVC2ncW51g==", + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.17.0.tgz", + "integrity": "sha512-z9IwISLWdlrWfLT9p0eTOoW1eDHEu/InEzZhKxZA+ylUbbNHLIdMwKHaalbnv2dhjtsLs7Nf87hHJs2wKtn2iA==", "dependencies": { "@babel/runtime-corejs3": "^7.24.4", "@braintree/sanitize-url": "=7.0.1", diff --git a/playground/package.json b/playground/package.json index e836a910ef..e3d100d786 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,7 +14,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.16.2" + "swagger-ui-react": "^5.17.0" }, "devDependencies": { "@types/react": "^18.2.79", From 8de0f997354dfc7f62bd879724b71ec5a9af4c7c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 18:23:32 -0400 Subject: [PATCH 40/49] bot: Bump @typescript-eslint/eslint-plugin from 7.7.0 to 7.7.1 in /playground (#2551) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 7.7.0 to 7.7.1.
Release notes

Sourced from @typescript-eslint/eslint-plugin's releases.

v7.7.1

7.7.1 (2024-04-22)

🩹 Fixes

  • eslint-plugin: [no-unsafe-assignment] handle shorthand property assignment (#8800)
  • eslint-plugin: [explicit-function-return-type] fix checking wrong ancestor's return type (#8809)
  • eslint-plugin: [prefer-optional-chain] only look at left operand for requireNullish (#8559)
  • eslint-plugin: [no-for-in-array] refine report location (#8874)
  • eslint-plugin: [no-unnecessary-type-assertion] allow non-null assertion for void type (#8912)

❤️ Thank You

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/eslint-plugin's changelog.

7.7.1 (2024-04-22)

🩹 Fixes

  • eslint-plugin: [no-unsafe-assignment] handle shorthand property assignment

  • eslint-plugin: [explicit-function-return-type] fix checking wrong ancestor's return type

  • eslint-plugin: [prefer-optional-chain] only look at left operand for requireNullish

  • eslint-plugin: [no-for-in-array] refine report location

  • eslint-plugin: [no-unnecessary-type-assertion] allow non-null assertion for void type

❤️ Thank You

  • Abraham Guo
  • Kirk Waiblinger
  • YeonJuan

You can read about our versioning strategy and releases on our website.

Commits
  • 3e19436 chore(release): publish 7.7.1
  • b2552ca fix(eslint-plugin): [no-unnecessary-type-assertion] allow non-null assertion ...
  • fdeba42 fix(eslint-plugin): [no-for-in-array] refine report location (#8874)
  • eef257b fix(eslint-plugin): [prefer-optional-chain] only look at left operand for `re...
  • 9e0d9f5 fix(eslint-plugin): [explicit-function-return-type] fix checking wrong ancest...
  • d07eb9e fix(eslint-plugin): [no-unsafe-assignment] handle shorthand property assignme...
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=7.7.0&new-version=7.7.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 117 +++++++++++++++-------------------- playground/package.json | 2 +- 2 files changed, 52 insertions(+), 67 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 598e495958..351001f613 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,7 +18,7 @@ "@types/react": "^18.2.79", "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/eslint-plugin": "^7.7.1", "@typescript-eslint/parser": "^7.7.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", @@ -2435,16 +2435,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz", - "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.1.tgz", + "integrity": "sha512-KwfdWXJBOviaBVhxO3p5TJiLpNuh2iyXyjmWN0f1nU87pwyvfS0EmjC6ukQVYVFJd/K1+0NWGPDXiyEyQorn0Q==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "7.7.0", - "@typescript-eslint/type-utils": "7.7.0", - "@typescript-eslint/utils": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0", + "@typescript-eslint/scope-manager": "7.7.1", + "@typescript-eslint/type-utils": "7.7.1", + "@typescript-eslint/utils": "7.7.1", + "@typescript-eslint/visitor-keys": "7.7.1", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.3.1", @@ -2572,29 +2572,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", - "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz", + "integrity": "sha512-PytBif2SF+9SpEUKynYn5g1RHFddJUcyynGpztX3l/ik7KmZEv19WCMhUBkHXPU9es/VWGD3/zg3wg90+Dh2rA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0" + "@typescript-eslint/types": "7.7.1", + "@typescript-eslint/visitor-keys": "7.7.1" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2605,13 +2590,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz", - "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==", + 
"version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.1.tgz", + "integrity": "sha512-ZksJLW3WF7o75zaBPScdW1Gbkwhd/lyeXGf1kQCxJaOeITscoSl0MjynVvCzuV5boUz/3fOI06Lz8La55mu29Q==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.7.0", - "@typescript-eslint/utils": "7.7.0", + "@typescript-eslint/typescript-estree": "7.7.1", + "@typescript-eslint/utils": "7.7.1", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2632,9 +2617,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", - "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.1.tgz", + "integrity": "sha512-AmPmnGW1ZLTpWa+/2omPrPfR7BcbUU4oha5VIbSbS1a1Tv966bklvLNXxp3mrbc+P2j4MNOTfDffNsk4o0c6/w==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2645,13 +2630,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", - "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.1.tgz", + "integrity": "sha512-CXe0JHCXru8Fa36dteXqmH2YxngKJjkQLjxzoj6LYwzZ7qZvgsLSc+eqItCrqIop8Vl2UKoAi0StVWu97FQZIQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0", + "@typescript-eslint/types": "7.7.1", + "@typescript-eslint/visitor-keys": "7.7.1", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2672,33 +2657,18 @@ } } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@typescript-eslint/utils": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz", - "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.1.tgz", + "integrity": "sha512-QUvBxPEaBXf41ZBbaidKICgVL8Hin0p6prQDu6bbetWo39BKbWJxRsErOzMNT1rXvTll+J7ChrbmMCXM9rsvOQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.15", "@types/semver": "^7.5.8", - "@typescript-eslint/scope-manager": "7.7.0", - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/typescript-estree": "7.7.0", + "@typescript-eslint/scope-manager": "7.7.1", + "@typescript-eslint/types": "7.7.1", + "@typescript-eslint/typescript-estree": "7.7.1", "semver": "^7.6.0" }, "engines": { @@ -2713,12 +2683,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", - 
"integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.1.tgz", + "integrity": "sha512-gBL3Eq25uADw1LQ9kVpf3hRM+DWzs0uZknHYK3hq4jcTPqVCClHGDnB6UUUV2SFeBeA4KWHWbbLqmbGcZ4FYbw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.0", + "@typescript-eslint/types": "7.7.1", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -5112,6 +5082,21 @@ "node": ">=6" } }, + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", diff --git a/playground/package.json b/playground/package.json index e3d100d786..6101569925 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,7 +20,7 @@ "@types/react": "^18.2.79", "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/eslint-plugin": "^7.7.1", "@typescript-eslint/parser": "^7.7.0", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", From 13c4c9d0b51ee60ca5512310478dc24be91f5b0d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 18:41:40 -0400 Subject: [PATCH 41/49] bot: Bump @typescript-eslint/parser from 7.7.0 to 7.7.1 in /playground (#2550) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 7.7.0 to 7.7.1.
Release notes

Sourced from @typescript-eslint/parser's releases.

v7.7.1

7.7.1 (2024-04-22)

🩹 Fixes

  • eslint-plugin: [no-unsafe-assignment] handle shorthand property assignment (#8800)
  • eslint-plugin: [explicit-function-return-type] fix checking wrong ancestor's return type (#8809)
  • eslint-plugin: [prefer-optional-chain] only look at left operand for requireNullish (#8559)
  • eslint-plugin: [no-for-in-array] refine report location (#8874)
  • eslint-plugin: [no-unnecessary-type-assertion] allow non-null assertion for void type (#8912)

❤️ Thank You

  • Abraham Guo
  • Kirk Waiblinger
  • YeonJuan

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/parser's changelog.

7.7.1 (2024-04-22)

This was a version bump only for parser to align it with other projects, there were no code changes.

You can read about our versioning strategy and releases on our website.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=7.7.0&new-version=7.7.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

  • `@dependabot rebase` will rebase this PR
  • `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
  • `@dependabot merge` will merge this PR after your CI passes on it
  • `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
  • `@dependabot cancel merge` will cancel a previously requested merge and block automerging
  • `@dependabot reopen` will reopen this PR if it is closed
  • `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
  • `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
  • `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 91 ++++-------------------------------- playground/package.json | 2 +- 2 files changed, 9 insertions(+), 84 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 351001f613..4860a7f750 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.7.1", - "@typescript-eslint/parser": "^7.7.0", + "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2470,15 +2470,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz", - "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.1.tgz", + "integrity": "sha512-vmPzBOOtz48F6JAGVS/kZYk4EkXao6iGrD838sp1w3NQQC0W8ry/q641KU4PrG7AKNAf56NOcR8GOpH8l9FPCw==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.7.0", - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/typescript-estree": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0", + "@typescript-eslint/scope-manager": "7.7.1", + "@typescript-eslint/types": "7.7.1", + "@typescript-eslint/typescript-estree": "7.7.1", + "@typescript-eslint/visitor-keys": "7.7.1", "debug": "^4.3.4" }, "engines": { @@ -2497,81 +2497,6 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz", - "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz", - "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==", - "dev": true, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz", - "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.7.0", - "@typescript-eslint/visitor-keys": "7.7.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz", - "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.7.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/scope-manager": { "version": "7.7.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz", diff --git a/playground/package.json b/playground/package.json index 6101569925..2276fc2d31 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.25", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.7.1", - "@typescript-eslint/parser": "^7.7.0", + "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", "eslint-plugin-react-hooks": "^4.6.0", From d0235ef8fb6c515b4ea93534128285eda809ba62 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Tue, 30 Apr 2024 13:57:56 -0400 Subject: [PATCH 42/49] test(i): Fix PnCounter overflow tests (#2567) ## Relevant issue(s) Resolves #2565 ## Description Fixes a faulty PnCounter overflow test which was doing a no-op whilst pretending to increment. Adds a test for negative float64 overflows, and two tests documenting the newly found https://github.com/sourcenetwork/defradb/issues/2566 in a new `issues` directory. --- tests/integration/issues/2566_test.go | 205 ++++++++++++++++++ tests/integration/issues/2569_test.go | 167 ++++++++++++++ tests/integration/issues/README.md | 5 + .../mutation/update/crdt/pncounter_test.go | 113 +++++++++- tests/integration/test_case.go | 7 + tests/integration/utils2.go | 32 +++ 6 files changed, 524 insertions(+), 5 deletions(-) create mode 100644 tests/integration/issues/2566_test.go create mode 100644 tests/integration/issues/2569_test.go create mode 100644 tests/integration/issues/README.md diff --git a/tests/integration/issues/2566_test.go b/tests/integration/issues/2566_test.go new file mode 100644 index 0000000000..696425a3b1 --- /dev/null +++ b/tests/integration/issues/2566_test.go @@ -0,0 +1,205 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package issues + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test documents https://github.com/sourcenetwork/defradb/issues/2566 +func TestP2PUpdate_WithPNCounterSimultaneousOverflowIncrement_DoesNotReachConsistency(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: fmt.Sprintf(`{ + "Name": "John", + "Age": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, -math.MaxFloat64), + }, + testUtils.ConnectPeers{ + // Configure the peer connection after the document has been created and updated independently + // on each node. This allows us to be sure which update was applied on each node. + // If the connection was configured before the updates there would be a race condition resulting + // in a variable resultant state. + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // This is an arbitrary update on both nodes to force the sync of the document created + // before the peer connection was configured. + Doc: `{ + "Name": "Fred" + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 0 overflows before subtraction, and because subtracting from infinity + // results in infinity the value remains infinite + "Age": math.Inf(1), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 1 subtracts before adding, meaning no overflow is achieved and the value + // remains finite + "Age": float64(1.7976931348623155e+307), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents https://github.com/sourcenetwork/defradb/issues/2566 +func TestP2PUpdate_WithPNCounterSimultaneousOverflowDecrement_DoesNotReachConsistency(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: fmt.Sprintf(`{ + "Name": "John", + "Age": %g + }`, -math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: fmt.Sprintf(`{ + "Age": %g + }`, -math.MaxFloat64), + }, + testUtils.ConnectPeers{ + // Configure the peer connection after the document has been created and
updated independently + // on each node. This allows us to be sure which update was applied on each node. + // If the connection was configured before the updates there would be a race condition resulting + // in a variable resultant state. + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // This is an arbitrary update on both nodes to force the sync of the document created + // before the peer connection was configured. + Doc: `{ + "Name": "Fred" + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 0 overflows before addition, and because adding to infinity + // results in infinity the value remains infinite + "Age": math.Inf(-1), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + // Node 1 adds before subtracting, meaning no overflow is achieved and the value + // remains finite + "Age": float64(-1.7976931348623155e+307), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/issues/2569_test.go b/tests/integration/issues/2569_test.go new file mode 100644 index 0000000000..2d942177d6 --- /dev/null +++ b/tests/integration/issues/2569_test.go @@ -0,0 +1,167 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package issues + +import ( + "fmt" + "math" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// These tests document https://github.com/sourcenetwork/defradb/issues/2569 + +func TestP2PUpdate_WithPNCounterFloatOverflowIncrement_PreventsQuerying(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of `math.Inf(1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + ExpectedError: "unexpected end of JSON input", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPNCounterFloatOverflowDecrement_PreventsQuerying(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, -math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of
`math.Inf(-1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, -math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + ExpectedError: "unexpected end of JSON input", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PUpdate_WithPNCounterFloatOverflow_PreventsCollectionGet(t *testing.T) { + test := testUtils.TestCase{ + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This issue only affects the http and the cli clients + testUtils.HTTPClientType, + testUtils.CLIClientType, + }, + ), + SupportedMutationTypes: immutable.Some( + []testUtils.MutationType{ + // We limit the test to Collection mutation calls, as the test framework + // will make a `Get` call before submitting the document, which is where the error + // will surface (not the update itself) + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + // Overflow the points field, this results in a value of `math.Inf(1)` + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + // Try and update the document again, the value used does not matter. + Doc: `{ + "points": 1 + }`, + // WARNING: This error is just an artifact of our test harness, what actually happens + // is the test harness calls `collection.Get`, which returns an empty string and no error. + ExpectedError: "cannot parse JSON: cannot parse empty string", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/issues/README.md b/tests/integration/issues/README.md new file mode 100644 index 0000000000..c9405853bc --- /dev/null +++ b/tests/integration/issues/README.md @@ -0,0 +1,5 @@ +# Issues + +This directory hosts tests documenting known issues. Test files are named after their corresponding GitHub issue ("\[IssueNumber\]_test.go"). + +Ideally the only file in this directory would be this readme. diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go index fe350ab852..534bd406dc 100644 --- a/tests/integration/mutation/update/crdt/pncounter_test.go +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -174,10 +174,16 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testi } // This test documents what happens when an overflow occurs in a PN Counter with Float type. -// In this case it is the same as a no-op. -func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T) { +func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_PositiveInf(t *testing.T) { test := testUtils.TestCase{ - Description: "Positive increments of a PN Counter with Float type and overflow causing a no-op", + Description: "Positive increments of a PN Counter with Float type and overflow", + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -195,8 +201,105 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.
}, testUtils.UpdateDoc{ DocID: 0, + Doc: fmt.Sprintf(`{ + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.Inf(1), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a PN Counter with Float type. +func TestPNCounterUpdate_FloatKindWithDecrementOverflow_NegativeInf(t *testing.T) { + test := testUtils.TestCase{ + Description: "Decrements of a PN Counter with Float type and overflow", + SupportedClientTypes: immutable.Some( + []testUtils.ClientType{ + // This test only supports the Go client at the moment due to + // https://github.com/sourcenetwork/defradb/issues/2569 + testUtils.GoClientType, + }, + ), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, -math.MaxFloat64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: fmt.Sprintf(`{ + "points": %g + }`, -math.MaxFloat64/10), + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.Inf(-1), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPNCounterUpdate_FloatKindWithPositiveIncrementInsignificantValue_DoesNothing(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Float type and an insignificant value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64/10), + }, + testUtils.UpdateDoc{ + // `1` is insignificant to a large float64 and adding it to the large value + // should not result in a value change Doc: `{ - "points": 1000 + "points": 1 }`, }, testUtils.Request{ @@ -209,7 +312,7 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing. Results: []map[string]any{ { "name": "John", - "points": math.MaxFloat64, + "points": math.MaxFloat64 / 10, }, }, }, diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 8d9315e4fa..fa1629d0ef 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -38,6 +38,13 @@ type TestCase struct { // This is to only be used in the very rare cases where we really do want behavioural // differences between mutation types, or we need to temporarily document a bug. SupportedMutationTypes immutable.Option[[]MutationType] + + // If provided a value, SupportedClientTypes will limit the client types under test to those + // within this set. If no active clients pass this filter the test will be skipped. + // + // This is to only be used in the very rare cases where we really do want behavioural + // differences between client types, or we need to temporarily document a bug.
+ SupportedClientTypes immutable.Option[[]ClientType] } // SetupComplete is a flag to explicitly notify the change detector at which point diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index a52074e72e..4821d06b32 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -170,6 +170,8 @@ func ExecuteTestCase( require.NotEmpty(t, databases) require.NotEmpty(t, clients) + clients = skipIfClientTypeUnsupported(t, clients, testCase.SupportedClientTypes) + ctx := context.Background() for _, ct := range clients { for _, dbt := range databases { @@ -1994,6 +1996,36 @@ func skipIfMutationTypeUnsupported(t *testing.T, supportedMutationTypes immutabl } } +// skipIfClientTypeUnsupported returns a new set of client types that match the given supported set. +// +// If supportedClientTypes is none no filtering will take place and the input client set will be returned. +// If the resultant filtered set is empty the test will be skipped. +func skipIfClientTypeUnsupported( + t *testing.T, + clients []ClientType, + supportedClientTypes immutable.Option[[]ClientType], +) []ClientType { + if !supportedClientTypes.HasValue() { + return clients + } + + filteredClients := []ClientType{} + for _, supportedMutationType := range supportedClientTypes.Value() { + for _, client := range clients { + if supportedMutationType == client { + filteredClients = append(filteredClients, client) + break + } + } + } + + if len(filteredClients) == 0 { + t.Skipf("test does not support any given client type. Type: %v", supportedClientTypes) + } + + return filteredClients +} + // skipIfNetworkTest skips the current test if the given actions // contain network actions and skipNetworkTests is true. func skipIfNetworkTest(t *testing.T, actions []any) { From 3b90e8babd01a6e8bc208c37385ea65efbb5917a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 30 Apr 2024 17:19:27 -0400 Subject: [PATCH 43/49] bot: Update dependencies (bulk dependabot PRs) 30-04-2024 (#2570) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2564 bot: Bump go.opentelemetry.io/otel/metric from 1.25.0 to 1.26.0 #2562 bot: Bump swagger-ui-react from 5.17.0 to 5.17.2 in /playground #2561 bot: Bump react and @types/react in /playground #2560 bot: Bump vite from 5.2.8 to 5.2.10 in /playground #2559 bot: Bump eslint-plugin-react-hooks from 4.6.0 to 4.6.2 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #2563 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.25.0 to 1.26.0 #2558 bot: Bump react-dom and @types/react-dom in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 10 +- go.sum | 20 +- playground/package-lock.json | 1414 ++++++++-------------------------- playground/package.json | 14 +- 4 files changed, 351 insertions(+), 1107 deletions(-) diff --git a/go.mod b/go.mod index 0bfa96d5b9..9d236c86de 100644 --- a/go.mod +++ b/go.mod @@ -44,8 +44,8 @@ require ( github.com/ugorji/go/codec v1.2.12 github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 - go.opentelemetry.io/otel/metric v1.25.0 - go.opentelemetry.io/otel/sdk/metric v1.25.0 + go.opentelemetry.io/otel/metric v1.26.0 + 
go.opentelemetry.io/otel/sdk/metric v1.26.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 google.golang.org/grpc v1.63.2 @@ -283,9 +283,9 @@ require ( github.com/zondax/ledger-go v0.14.3 // indirect go.etcd.io/bbolt v1.3.8 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.25.0 // indirect - go.opentelemetry.io/otel/sdk v1.25.0 // indirect - go.opentelemetry.io/otel/trace v1.25.0 // indirect + go.opentelemetry.io/otel v1.26.0 // indirect + go.opentelemetry.io/otel/sdk v1.26.0 // indirect + go.opentelemetry.io/otel/trace v1.26.0 // indirect go.uber.org/dig v1.17.1 // indirect go.uber.org/fx v1.20.1 // indirect go.uber.org/mock v0.4.0 // indirect diff --git a/go.sum b/go.sum index c7ac5763e2..cbe3eea36c 100644 --- a/go.sum +++ b/go.sum @@ -1185,16 +1185,16 @@ go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k= -go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg= -go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA= -go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s= -go.opentelemetry.io/otel/sdk v1.25.0 h1:PDryEJPC8YJZQSyLY5eqLeafHtG+X7FWnf3aXMtxbqo= -go.opentelemetry.io/otel/sdk v1.25.0/go.mod h1:oFgzCM2zdsxKzz6zwpTZYLLQsFwc+K0daArPdIhuxkw= -go.opentelemetry.io/otel/sdk/metric v1.25.0 h1:7CiHOy08LbrxMAp4vWpbiPcklunUshVpAvGBrdDRlGw= -go.opentelemetry.io/otel/sdk/metric v1.25.0/go.mod h1:LzwoKptdbBBdYfvtGCzGwk6GWMA3aUzBOwtQpR6Nz7o= -go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM= -go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I= +go.opentelemetry.io/otel v1.26.0 h1:LQwgL5s/1W7YiiRwxf03QGnWLb2HW4pLiAhaA5cZXBs= +go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= +go.opentelemetry.io/otel/metric v1.26.0 h1:7S39CLuY5Jgg9CrnA9HHiEjGMF/X2VHvoXGgSllRz30= +go.opentelemetry.io/otel/metric v1.26.0/go.mod h1:SY+rHOI4cEawI9a7N1A4nIg/nTQXe1ccCNWYOJUrpX4= +go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8= +go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs= +go.opentelemetry.io/otel/sdk/metric v1.26.0 h1:cWSks5tfriHPdWFnl+qpX3P681aAYqlZHcAyHw5aU9Y= +go.opentelemetry.io/otel/sdk/metric v1.26.0/go.mod h1:ClMFFknnThJCksebJwz7KIyEDHO+nTB6gK8obLy8RyE= +go.opentelemetry.io/otel/trace v1.26.0 h1:1ieeAUb4y0TE26jUFrCIXKpTuVK7uJGN9/Z/2LP5sQA= +go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= diff --git a/playground/package-lock.json b/playground/package-lock.json index 4860a7f750..8bb2cafd7b 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -10,37 +10,28 @@ "dependencies": { "graphiql": "^3.2.0", "graphql": "^16.8.1", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "swagger-ui-react": 
"^5.17.0" + "react": "^18.3.1", + "react-dom": "^18.3.1", + "swagger-ui-react": "^5.17.2" }, "devDependencies": { - "@types/react": "^18.2.79", - "@types/react-dom": "^18.2.25", + "@types/react": "^18.3.1", + "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.7.1", "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", - "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.6", "typescript": "^5.4.5", - "vite": "^5.2.8" - } - }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "vite": "^5.2.10" } }, "node_modules/@babel/runtime": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.0.tgz", - "integrity": "sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw==", + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.5.tgz", + "integrity": "sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -49,9 +40,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.4.tgz", - "integrity": "sha512-VOQOexSilscN24VEY810G/PqtpFvx/z6UqDIjIWbDe2368HhDLkYN5TYwaEz/+eRCUkhJ2WaNLLmQAlxzfWj4w==", + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.5.tgz", + "integrity": "sha512-GWO0mgzNMLWaSYM4z4NVIuY0Cd1fl8cPnuetuddu5w/qGuvt5Y7oUi/kvvQGK9xgOkFJDQX2heIvTRn/OQ1XTg==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -86,9 +77,9 @@ "peer": true }, "node_modules/@codemirror/view": { - "version": "6.26.1", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.1.tgz", - "integrity": "sha512-wLw0t3R9AwOSQThdZ5Onw8QQtem5asE7+bPlnzc57eubPqiuJKIzwjMZ+C42vQett+iva+J8VgFV4RYWDBh5FA==", + "version": "6.26.3", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.3.tgz", + "integrity": "sha512-gmqxkPALZjkgSxIeeweY/wGQXBfwTUaLs8h7OKtSwfbj9Ct3L11lD+u1sS7XHppxFQoMDiMDp07P9f3I2jWOHw==", "peer": true, "dependencies": { "@codemirror/state": "^6.4.0", @@ -558,28 +549,28 @@ } }, "node_modules/@floating-ui/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.0.tgz", - "integrity": "sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.1.tgz", + "integrity": "sha512-42UH54oPZHPdRHdw6BgoBD6cg/eVTmVrFcgeRDM3jbO7uxSoipVcmcIGFcA5jmOHO5apcyvBhkSKES3fQJnu7A==", "dependencies": { - "@floating-ui/utils": "^0.2.1" + "@floating-ui/utils": "^0.2.0" } }, "node_modules/@floating-ui/dom": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", - "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", + "version": "1.6.4", + "resolved": 
"https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.4.tgz", + "integrity": "sha512-0G8R+zOvQsAG1pg2Q99P21jiqxqGBW1iRe/iXHsBRBxnpXKFI8QwbB4x5KmYLggNO5m34IQgOIu9SCRfR/WWiQ==", "dependencies": { "@floating-ui/core": "^1.0.0", "@floating-ui/utils": "^0.2.0" } }, "node_modules/@floating-ui/react-dom": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.8.tgz", - "integrity": "sha512-HOdqOt3R3OGeTKidaLvJKcgg75S6tibQ3Tif4eyd91QnIJWr0NLvoXFpJA/j8HqkFSL68GDca9AuyWEHlhyClw==", + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.9.tgz", + "integrity": "sha512-q0umO0+LQK4+p6aGyvzASqKbKOJcAHJ7ycE9CuUvfx3s9zTHWmGJTPOIlM/hmSBfUfg/XfY5YhLBLR/LHwShQQ==", "dependencies": { - "@floating-ui/dom": "^1.6.1" + "@floating-ui/dom": "^1.0.0" }, "peerDependencies": { "react": ">=16.8.0", @@ -587,9 +578,9 @@ } }, "node_modules/@floating-ui/utils": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", - "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.2.tgz", + "integrity": "sha512-J4yDIIthosAsRZ5CPYP/jQvUAQtlZTTD/4suA08/FEnlxqW3sKS9iAhgsa9VYLZ6vDHn/ixJgIqRQPotoBjxIw==" }, "node_modules/@graphiql/react": { "version": "0.21.0", @@ -637,9 +628,9 @@ } }, "node_modules/@headlessui/react": { - "version": "1.7.18", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.18.tgz", - "integrity": "sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ==", + "version": "1.7.19", + "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.19.tgz", + "integrity": "sha512-Ll+8q3OlMJfJbAKM/+/Y2q6PPYbryqNTXDbryx7SXLIDamkF6iQFbriYHga0dY44PvDhvvBWCx1Xj4U5+G4hOw==", "dependencies": { "@tanstack/react-virtual": "^3.0.0-beta.60", "client-only": "^0.0.1" @@ -702,9 +693,9 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", - "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", "dev": true }, "node_modules/@lezer/common": { @@ -1455,9 +1446,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.13.2.tgz", - "integrity": "sha512-3XFIDKWMFZrMnao1mJhnOT1h2g0169Os848NhhmGweEcfJ4rCi+3yMCOLG4zA61rbJdkcrM/DjVZm9Hg5p5w7g==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.17.2.tgz", + "integrity": "sha512-NM0jFxY8bB8QLkoKxIQeObCaDlJKewVlIEkuyYKm5An1tdVZ966w2+MPQ2l8LBZLjR+SgyV+nRkTIunzOYBMLQ==", "cpu": [ "arm" ], @@ -1468,9 +1459,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.13.2.tgz", - "integrity": "sha512-GdxxXbAuM7Y/YQM9/TwwP+L0omeE/lJAR1J+olu36c3LqqZEBdsIWeQ91KBe6nxwOnb06Xh7JS2U5ooWU5/LgQ==", + "version": "4.17.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.17.2.tgz", + "integrity": "sha512-yeX/Usk7daNIVwkq2uGoq2BYJKZY1JfyLTaHO/jaiSwi/lsf8fTFoQW/n6IdAsx5tx+iotu2zCJwz8MxI6D/Bw==", "cpu": [ "arm64" ], @@ -1481,9 +1472,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.13.2.tgz", - "integrity": "sha512-mCMlpzlBgOTdaFs83I4XRr8wNPveJiJX1RLfv4hggyIVhfB5mJfN4P8Z6yKh+oE4Luz+qq1P3kVdWrCKcMYrrA==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.17.2.tgz", + "integrity": "sha512-kcMLpE6uCwls023+kknm71ug7MZOrtXo+y5p/tsg6jltpDtgQY1Eq5sGfHcQfb+lfuKwhBmEURDga9N0ol4YPw==", "cpu": [ "arm64" ], @@ -1494,9 +1485,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.13.2.tgz", - "integrity": "sha512-yUoEvnH0FBef/NbB1u6d3HNGyruAKnN74LrPAfDQL3O32e3k3OSfLrPgSJmgb3PJrBZWfPyt6m4ZhAFa2nZp2A==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.17.2.tgz", + "integrity": "sha512-AtKwD0VEx0zWkL0ZjixEkp5tbNLzX+FCqGG1SvOu993HnSz4qDI6S4kGzubrEJAljpVkhRSlg5bzpV//E6ysTQ==", "cpu": [ "x64" ], @@ -1507,9 +1498,22 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.13.2.tgz", - "integrity": "sha512-GYbLs5ErswU/Xs7aGXqzc3RrdEjKdmoCrgzhJWyFL0r5fL3qd1NPcDKDowDnmcoSiGJeU68/Vy+OMUluRxPiLQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.17.2.tgz", + "integrity": "sha512-3reX2fUHqN7sffBNqmEyMQVj/CKhIHZd4y631duy0hZqI8Qoqf6lTtmAKvJFYa6bhU95B1D0WgzHkmTg33In0A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.17.2.tgz", + "integrity": "sha512-uSqpsp91mheRgw96xtyAGP9FW5ChctTFEoXP0r5FAzj/3ZRv3Uxjtc7taRQSaQM/q85KEKjKsZuiZM3GyUivRg==", "cpu": [ "arm" ], @@ -1520,9 +1524,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.13.2.tgz", - "integrity": "sha512-L1+D8/wqGnKQIlh4Zre9i4R4b4noxzH5DDciyahX4oOz62CphY7WDWqJoQ66zNR4oScLNOqQJfNSIAe/6TPUmQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.17.2.tgz", + "integrity": "sha512-EMMPHkiCRtE8Wdk3Qhtciq6BndLtstqZIroHiiGzB3C5LDJmIZcSzVtLRbwuXuUft1Cnv+9fxuDtDxz3k3EW2A==", "cpu": [ "arm64" ], @@ -1533,9 +1537,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.13.2.tgz", - "integrity": "sha512-tK5eoKFkXdz6vjfkSTCupUzCo40xueTOiOO6PeEIadlNBkadH1wNOH8ILCPIl8by/Gmb5AGAeQOFeLev7iZDOA==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.17.2.tgz", + "integrity": "sha512-NMPylUUZ1i0z/xJUIx6VUhISZDRT+uTWpBcjdv0/zkp7b/bQDF+NfnfdzuTiB1G6HTodgoFa93hp0O1xl+/UbA==", 
"cpu": [ "arm64" ], @@ -1546,11 +1550,11 @@ ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.13.2.tgz", - "integrity": "sha512-zvXvAUGGEYi6tYhcDmb9wlOckVbuD+7z3mzInCSTACJ4DQrdSLPNUeDIcAQW39M3q6PDquqLWu7pnO39uSMRzQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.17.2.tgz", + "integrity": "sha512-T19My13y8uYXPw/L/k0JYaX1fJKFT/PWdXiHr8mTbXWxjVF1t+8Xl31DgBBvEKclw+1b00Chg0hxE2O7bTG7GQ==", "cpu": [ - "ppc64le" + "ppc64" ], "dev": true, "optional": true, @@ -1559,9 +1563,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.13.2.tgz", - "integrity": "sha512-C3GSKvMtdudHCN5HdmAMSRYR2kkhgdOfye4w0xzyii7lebVr4riCgmM6lRiSCnJn2w1Xz7ZZzHKuLrjx5620kw==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.17.2.tgz", + "integrity": "sha512-BOaNfthf3X3fOWAB+IJ9kxTgPmMqPPH5f5k2DcCsRrBIbWnaJCgX2ll77dV1TdSy9SaXTR5iDXRL8n7AnoP5cg==", "cpu": [ "riscv64" ], @@ -1572,9 +1576,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.13.2.tgz", - "integrity": "sha512-l4U0KDFwzD36j7HdfJ5/TveEQ1fUTjFFQP5qIt9gBqBgu1G8/kCaq5Ok05kd5TG9F8Lltf3MoYsUMw3rNlJ0Yg==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.17.2.tgz", + "integrity": "sha512-W0UP/x7bnn3xN2eYMql2T/+wpASLE5SjObXILTMPUBDB/Fg/FxC+gX4nvCfPBCbNhz51C+HcqQp2qQ4u25ok6g==", "cpu": [ "s390x" ], @@ -1585,9 +1589,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.13.2.tgz", - "integrity": "sha512-xXMLUAMzrtsvh3cZ448vbXqlUa7ZL8z0MwHp63K2IIID2+DeP5iWIT6g1SN7hg1VxPzqx0xZdiDM9l4n9LRU1A==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.17.2.tgz", + "integrity": "sha512-Hy7pLwByUOuyaFC6mAr7m+oMC+V7qyifzs/nW2OJfC8H4hbCzOX07Ov0VFk/zP3kBsELWNFi7rJtgbKYsav9QQ==", "cpu": [ "x64" ], @@ -1598,9 +1602,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.13.2.tgz", - "integrity": "sha512-M/JYAWickafUijWPai4ehrjzVPKRCyDb1SLuO+ZyPfoXgeCEAlgPkNXewFZx0zcnoIe3ay4UjXIMdXQXOZXWqA==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.17.2.tgz", + "integrity": "sha512-h1+yTWeYbRdAyJ/jMiVw0l6fOOm/0D1vNLui9iPuqgRGnXA0u21gAqOyB5iHjlM9MMfNOm9RHCQ7zLIzT0x11Q==", "cpu": [ "x64" ], @@ -1611,9 +1615,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.13.2.tgz", - "integrity": "sha512-2YWwoVg9KRkIKaXSh0mz3NmfurpmYoBBTAXA9qt7VXk0Xy12PoOP40EFuau+ajgALbbhi4uTj3tSG3tVseCjuA==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.17.2.tgz", + "integrity": 
"sha512-tmdtXMfKAjy5+IQsVtDiCfqbynAQE/TQRpWdVataHmhMb9DCoJxp9vLcCBjEQWMiUYxO1QprH/HbY9ragCEFLA==", "cpu": [ "arm64" ], @@ -1624,9 +1628,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.13.2.tgz", - "integrity": "sha512-2FSsE9aQ6OWD20E498NYKEQLneShWes0NGMPQwxWOdws35qQXH+FplabOSP5zEe1pVjurSDOGEVCE2agFwSEsw==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.17.2.tgz", + "integrity": "sha512-7II/QCSTAHuE5vdZaQEwJq2ZACkBpQDOmQsE6D6XUbnBHW8IAhm4eTufL6msLJorzrHDFv3CF8oCA/hSIRuZeQ==", "cpu": [ "ia32" ], @@ -1637,9 +1641,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.13.2.tgz", - "integrity": "sha512-7h7J2nokcdPePdKykd8wtc8QqqkqxIrUz7MHj6aNr8waBRU//NLDVnNjQnqQO6fqtjrtCdftpbTuOKAyrAQETQ==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.17.2.tgz", + "integrity": "sha512-TGGO7v7qOq4CYmSBVEYpI1Y5xDuCEnbVC5Vth8mOsW0gDSzxNrVERPc790IGHsrT2dQSimgMr9Ub3Y1Jci5/8w==", "cpu": [ "x64" ], @@ -2107,9 +2111,9 @@ } }, "node_modules/@swc/core": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.6.tgz", - "integrity": "sha512-A7iK9+1qzTCIuc3IYcS8gPHCm9bZVKUJrfNnwveZYyo6OFp3jLno4WOM2yBy5uqedgYATEiWgBYHKq37KrU6IA==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.17.tgz", + "integrity": "sha512-tq+mdWvodMBNBBZbwFIMTVGYHe9N7zvEaycVVjfvAx20k1XozHbHhRv+9pEVFJjwRxLdXmtvFZd3QZHRAOpoNQ==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -2124,16 +2128,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.4.6", - "@swc/core-darwin-x64": "1.4.6", - "@swc/core-linux-arm-gnueabihf": "1.4.6", - "@swc/core-linux-arm64-gnu": "1.4.6", - "@swc/core-linux-arm64-musl": "1.4.6", - "@swc/core-linux-x64-gnu": "1.4.6", - "@swc/core-linux-x64-musl": "1.4.6", - "@swc/core-win32-arm64-msvc": "1.4.6", - "@swc/core-win32-ia32-msvc": "1.4.6", - "@swc/core-win32-x64-msvc": "1.4.6" + "@swc/core-darwin-arm64": "1.4.17", + "@swc/core-darwin-x64": "1.4.17", + "@swc/core-linux-arm-gnueabihf": "1.4.17", + "@swc/core-linux-arm64-gnu": "1.4.17", + "@swc/core-linux-arm64-musl": "1.4.17", + "@swc/core-linux-x64-gnu": "1.4.17", + "@swc/core-linux-x64-musl": "1.4.17", + "@swc/core-win32-arm64-msvc": "1.4.17", + "@swc/core-win32-ia32-msvc": "1.4.17", + "@swc/core-win32-x64-msvc": "1.4.17" }, "peerDependencies": { "@swc/helpers": "^0.5.0" @@ -2145,9 +2149,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.6.tgz", - "integrity": "sha512-bpggpx/BfLFyy48aUKq1PsNUxb7J6CINlpAUk0V4yXfmGnpZH80Gp1pM3GkFDQyCfq7L7IpjPrIjWQwCrL4hYw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.17.tgz", + "integrity": "sha512-HVl+W4LezoqHBAYg2JCqR+s9ife9yPfgWSj37iIawLWzOmuuJ7jVdIB7Ee2B75bEisSEKyxRlTl6Y1Oq3owBgw==", "cpu": [ "arm64" ], @@ -2161,9 +2165,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.6.tgz", - "integrity": 
"sha512-vJn+/ZuBTg+vtNkcmgZdH6FQpa0hFVdnB9bAeqYwKkyqP15zaPe6jfC+qL2y/cIeC7ASvHXEKrnCZgBLxfVQ9w==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.17.tgz", + "integrity": "sha512-WYRO9Fdzq4S/he8zjW5I95G1zcvyd9yyD3Tgi4/ic84P5XDlSMpBDpBLbr/dCPjmSg7aUXxNQqKqGkl6dQxYlA==", "cpu": [ "x64" ], @@ -2177,9 +2181,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.6.tgz", - "integrity": "sha512-hEmYcB/9XBAl02MtuVHszhNjQpjBzhk/NFulnU33tBMbNZpy2TN5yTsitezMq090QXdDz8sKIALApDyg07ZR8g==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.17.tgz", + "integrity": "sha512-cgbvpWOvtMH0XFjvwppUCR+Y+nf6QPaGu6AQ5hqCP+5Lv2zO5PG0RfasC4zBIjF53xgwEaaWmGP5/361P30X8Q==", "cpu": [ "arm" ], @@ -2193,9 +2197,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.6.tgz", - "integrity": "sha512-/UCYIVoGpm2YVvGHZM2QOA3dexa28BjcpLAIYnoCbgH5f7ulDhE8FAIO/9pasj+kixDBsdqewHfsNXFYlgGJjQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.17.tgz", + "integrity": "sha512-l7zHgaIY24cF9dyQ/FOWbmZDsEj2a9gRFbmgx2u19e3FzOPuOnaopFj0fRYXXKCmtdx+anD750iBIYnTR+pq/Q==", "cpu": [ "arm64" ], @@ -2209,9 +2213,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.6.tgz", - "integrity": "sha512-LGQsKJ8MA9zZ8xHCkbGkcPSmpkZL2O7drvwsGKynyCttHhpwVjj9lguhD4DWU3+FWIsjvho5Vu0Ggei8OYi/Lw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.17.tgz", + "integrity": "sha512-qhH4gr9gAlVk8MBtzXbzTP3BJyqbAfUOATGkyUtohh85fPXQYuzVlbExix3FZXTwFHNidGHY8C+ocscI7uDaYw==", "cpu": [ "arm64" ], @@ -2225,9 +2229,9 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.6.tgz", - "integrity": "sha512-10JL2nLIreMQDKvq2TECnQe5fCuoqBHu1yW8aChqgHUyg9d7gfZX/kppUsuimqcgRBnS0AjTDAA+JF6UsG/2Yg==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.17.tgz", + "integrity": "sha512-vRDFATL1oN5oZMImkwbgSHEkp8xG1ofEASBypze01W1Tqto8t+yo6gsp69wzCZBlxldsvPpvFZW55Jq0Rn+UnA==", "cpu": [ "x64" ], @@ -2241,9 +2245,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.6.tgz", - "integrity": "sha512-EGyjFVzVY6Do89x8sfah7I3cuP4MwtwzmA6OlfD/KASqfCFf5eIaEBMbajgR41bVfMV7lK72lwAIea5xEyq1AQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.17.tgz", + "integrity": "sha512-zQNPXAXn3nmPqv54JVEN8k2JMEcMTQ6veVuU0p5O+A7KscJq+AGle/7ZQXzpXSfUCXlLMX4wvd+rwfGhh3J4cw==", "cpu": [ "x64" ], @@ -2257,9 +2261,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.6.tgz", - "integrity": "sha512-gfW9AuXvwSyK07Vb8Y8E9m2oJZk21WqcD+X4BZhkbKB0TCZK0zk1j/HpS2UFlr1JB2zPKPpSWLU3ll0GEHRG2A==", + "version": "1.4.17", + 
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.17.tgz", + "integrity": "sha512-z86n7EhOwyzxwm+DLE5NoLkxCTme2lq7QZlDjbQyfCxOt6isWz8rkW5QowTX8w9Rdmk34ncrjSLvnHOeLY17+w==", "cpu": [ "arm64" ], @@ -2273,9 +2277,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.6.tgz", - "integrity": "sha512-ZuQm81FhhvNVYtVb9GfZ+Du6e7fZlkisWvuCeBeRiyseNt1tcrQ8J3V67jD2nxje8CVXrwG3oUIbPcybv2rxfQ==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.17.tgz", + "integrity": "sha512-JBwuSTJIgiJJX6wtr4wmXbfvOswHFj223AumUrK544QV69k60FJ9q2adPW9Csk+a8wm1hLxq4HKa2K334UHJ/g==", "cpu": [ "ia32" ], @@ -2289,9 +2293,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.6.tgz", - "integrity": "sha512-UagPb7w5V0uzWSjrXwOavGa7s9iv3wrVdEgWy+/inm0OwY4lj3zpK9qDnMWAwYLuFwkI3UG4Q3dH8wD+CUUcjw==", + "version": "1.4.17", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.17.tgz", + "integrity": "sha512-jFkOnGQamtVDBm3MF5Kq1lgW8vx4Rm1UvJWRUfg+0gx7Uc3Jp3QMFeMNw/rDNQYRDYPG3yunCC+2463ycd5+dg==", "cpu": [ "x64" ], @@ -2311,17 +2315,20 @@ "dev": true }, "node_modules/@swc/types": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.6.tgz", + "integrity": "sha512-/JLo/l2JsT/LRd80C3HfbmVpxOAJ11FO2RCEslFrgzLltoP9j8XIbsyDcfCt2WWyX+CM96rBoNM+IToAkFOugg==", + "dev": true, + "dependencies": { + "@swc/counter": "^0.1.3" + } }, "node_modules/@tanstack/react-virtual": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.2.0.tgz", - "integrity": "sha512-OEdMByf2hEfDa6XDbGlZN8qO6bTjlNKqjM3im9JG+u3mCL8jALy0T/67oDI001raUUPh1Bdmfn4ZvPOV5knpcg==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.5.0.tgz", + "integrity": "sha512-rtvo7KwuIvqK9zb0VZ5IL7fiJAEnG+0EiFZz8FUOs+2mhGqdGmjKIaT1XU7Zq0eFqL0jonLlhbayJI/J2SA/Bw==", "dependencies": { - "@tanstack/virtual-core": "3.2.0" + "@tanstack/virtual-core": "3.5.0" }, "funding": { "type": "github", @@ -2333,9 +2340,9 @@ } }, "node_modules/@tanstack/virtual-core": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.2.0.tgz", - "integrity": "sha512-P5XgYoAw/vfW65byBbJQCw+cagdXDT/qH6wmABiLt4v4YBT2q2vqCOhihe+D1Nt325F/S/0Tkv6C5z0Lv+VBQQ==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.5.0.tgz", + "integrity": "sha512-KnPRCkQTyqhanNC0K63GBG3wA8I+D1fQuVnAvcBF8f13akOKeQp1gSbu6f77zCxhEk727iV5oQnbHLYzHrECLg==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -2369,9 +2376,9 @@ "dev": true }, "node_modules/@types/prop-types": { - "version": "15.7.11", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", - "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==", + "version": "15.7.12", + "resolved": 
"https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", + "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", "devOptional": true }, "node_modules/@types/ramda": { @@ -2383,9 +2390,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.79", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.79.tgz", - "integrity": "sha512-RwGAGXPl9kSXwdNTafkOEuFrTBD5SA2B3iEB96xi8+xu5ddUa/cpvyVCSNn+asgLCTHkb5ZxN8gbuibYJi4s1w==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.1.tgz", + "integrity": "sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2393,9 +2400,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.25", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.25.tgz", - "integrity": "sha512-o/V48vf4MQh7juIKZU2QGDfli6p1+OOi5oXx36Hffpc9adsHeXjVp8rHuPkjd8VT8sOJ2Zp05HR7CdpGTIUFUA==", + "version": "18.3.0", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", + "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -2435,16 +2442,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.1.tgz", - "integrity": "sha512-KwfdWXJBOviaBVhxO3p5TJiLpNuh2iyXyjmWN0f1nU87pwyvfS0EmjC6ukQVYVFJd/K1+0NWGPDXiyEyQorn0Q==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz", + "integrity": "sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "7.7.1", - "@typescript-eslint/type-utils": "7.7.1", - "@typescript-eslint/utils": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/type-utils": "7.8.0", + "@typescript-eslint/utils": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.3.1", @@ -2470,15 +2477,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.1.tgz", - "integrity": "sha512-vmPzBOOtz48F6JAGVS/kZYk4EkXao6iGrD838sp1w3NQQC0W8ry/q641KU4PrG7AKNAf56NOcR8GOpH8l9FPCw==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.8.0.tgz", + "integrity": "sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.7.1", - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/typescript-estree": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/typescript-estree": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4" }, "engines": { @@ -2498,13 +2505,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - 
"version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz", - "integrity": "sha512-PytBif2SF+9SpEUKynYn5g1RHFddJUcyynGpztX3l/ik7KmZEv19WCMhUBkHXPU9es/VWGD3/zg3wg90+Dh2rA==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz", + "integrity": "sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1" + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0" }, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2515,13 +2522,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.1.tgz", - "integrity": "sha512-ZksJLW3WF7o75zaBPScdW1Gbkwhd/lyeXGf1kQCxJaOeITscoSl0MjynVvCzuV5boUz/3fOI06Lz8La55mu29Q==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz", + "integrity": "sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.7.1", - "@typescript-eslint/utils": "7.7.1", + "@typescript-eslint/typescript-estree": "7.8.0", + "@typescript-eslint/utils": "7.8.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2542,9 +2549,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.1.tgz", - "integrity": "sha512-AmPmnGW1ZLTpWa+/2omPrPfR7BcbUU4oha5VIbSbS1a1Tv966bklvLNXxp3mrbc+P2j4MNOTfDffNsk4o0c6/w==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.8.0.tgz", + "integrity": "sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw==", "dev": true, "engines": { "node": "^18.18.0 || >=20.0.0" @@ -2555,13 +2562,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.1.tgz", - "integrity": "sha512-CXe0JHCXru8Fa36dteXqmH2YxngKJjkQLjxzoj6LYwzZ7qZvgsLSc+eqItCrqIop8Vl2UKoAi0StVWu97FQZIQ==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz", + "integrity": "sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/visitor-keys": "7.8.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2583,17 +2590,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.1.tgz", - "integrity": "sha512-QUvBxPEaBXf41ZBbaidKICgVL8Hin0p6prQDu6bbetWo39BKbWJxRsErOzMNT1rXvTll+J7ChrbmMCXM9rsvOQ==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.8.0.tgz", + "integrity": "sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": 
"^7.0.15", "@types/semver": "^7.5.8", - "@typescript-eslint/scope-manager": "7.7.1", - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/typescript-estree": "7.7.1", + "@typescript-eslint/scope-manager": "7.8.0", + "@typescript-eslint/types": "7.8.0", + "@typescript-eslint/typescript-estree": "7.8.0", "semver": "^7.6.0" }, "engines": { @@ -2608,12 +2615,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.1.tgz", - "integrity": "sha512-gBL3Eq25uADw1LQ9kVpf3hRM+DWzs0uZknHYK3hq4jcTPqVCClHGDnB6UUUV2SFeBeA4KWHWbbLqmbGcZ4FYbw==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz", + "integrity": "sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", + "@typescript-eslint/types": "7.8.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -2723,21 +2730,6 @@ "node": ">=10" } }, - "node_modules/array-buffer-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", - "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", - "dependencies": { - "call-bind": "^1.0.5", - "is-array-buffer": "^3.0.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array-union": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", @@ -2747,27 +2739,6 @@ "node": ">=8" } }, - "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", - "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", - "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.5", - "define-properties": "^1.2.1", - "es-abstract": "^1.22.3", - "es-errors": "^1.2.1", - "get-intrinsic": "^1.2.3", - "is-array-buffer": "^3.0.4", - "is-shared-array-buffer": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -2789,20 +2760,6 @@ "tslib": "^2.3.0" } }, - "node_modules/available-typed-arrays": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", - "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", - "dependencies": { - "possible-typed-array-names": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/axios": { "version": "1.6.8", "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", @@ -3116,54 +3073,6 @@ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", "devOptional": true }, - "node_modules/data-view-buffer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", - "integrity": 
"sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", - "dependencies": { - "call-bind": "^1.0.6", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/data-view-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", - "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", - "dependencies": { - "call-bind": "^1.0.7", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/data-view-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", - "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", - "dependencies": { - "call-bind": "^1.0.6", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -3234,22 +3143,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", - "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -3329,65 +3222,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/es-abstract": { - "version": "1.23.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", - "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", - "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "arraybuffer.prototype.slice": "^1.0.3", - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "data-view-buffer": "^1.0.1", - "data-view-byte-length": "^1.0.1", - "data-view-byte-offset": "^1.0.0", - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-set-tostringtag": "^2.0.3", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.4", - "get-symbol-description": "^1.0.2", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", - "hasown": "^2.0.2", - "internal-slot": "^1.0.7", - "is-array-buffer": "^3.0.4", - "is-callable": "^1.2.7", - "is-data-view": "^1.0.1", - "is-negative-zero": "^2.0.3", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.3", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.13", - "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", - "object-keys": "^1.1.1", - "object.assign": "^4.1.5", - "regexp.prototype.flags": "^1.5.2", - 
"safe-array-concat": "^1.1.2", - "safe-regex-test": "^1.0.3", - "string.prototype.trim": "^1.2.9", - "string.prototype.trimend": "^1.0.8", - "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-length": "^1.0.1", - "typed-array-byte-offset": "^1.0.2", - "typed-array-length": "^1.0.6", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.15" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/es-define-property": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", @@ -3407,46 +3241,6 @@ "node": ">= 0.4" } }, - "node_modules/es-object-atoms": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", - "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", - "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", - "dependencies": { - "get-intrinsic": "^1.2.4", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/esbuild": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", @@ -3553,9 +3347,9 @@ } }, "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", - "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz", + "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", "dev": true, "engines": { "node": ">=10" @@ -3849,14 +3643,6 @@ } } }, - "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dependencies": { - "is-callable": "^1.1.3" - } - }, "node_modules/form-data": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", @@ -3953,31 +3739,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/function.prototype.name": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", - "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - 
"es-abstract": "^1.22.1", - "functions-have-names": "^1.2.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/get-intrinsic": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", @@ -4004,22 +3765,6 @@ "node": ">=6" } }, - "node_modules/get-symbol-description": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", - "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", - "dependencies": { - "call-bind": "^1.0.5", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -4092,20 +3837,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", - "dependencies": { - "define-properties": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/globby": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", @@ -4187,14 +3918,6 @@ "graphql": "^15.5.0 || ^16.0.0" } }, - "node_modules/has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4236,20 +3959,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -4380,19 +4089,6 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "optional": true }, - "node_modules/internal-slot": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", - "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", - "dependencies": { - "es-errors": "^1.3.0", - "hasown": "^2.0.0", - "side-channel": "^1.0.4" - }, 
- "engines": { - "node": ">= 0.4" - } - }, "node_modules/invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", @@ -4423,125 +4119,45 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-array-buffer": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", - "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-bigint": { + "node_modules/is-decimal": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", - "dependencies": { - "has-bigints": "^1.0.1" - }, + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", + "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-boolean-object": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "bin": { + "is-docker": "cli.js" }, "engines": { - "node": ">= 0.4" + "node": ">=8" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=0.10.0" } }, - "node_modules/is-data-view": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", - "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, "dependencies": { - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-decimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-docker": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", - "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" + "is-extglob": "^2.1.1" }, "engines": { "node": ">=0.10.0" @@ -4556,17 +4172,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-negative-zero": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", - "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -4575,20 +4180,6 @@ "node": ">=0.12.0" } }, - "node_modules/is-number-object": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", @@ -4617,88 +4208,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", - "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", - "dependencies": { - "call-bind": "^1.0.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", - "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", - "dependencies": { - "which-typed-array": "^1.1.14" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakref": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-wsl": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", @@ -5079,9 +4588,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.60.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.60.0.tgz", - "integrity": "sha512-zcGgwoXbzw9NczqbGzAWL/ToDYAxv1V8gL1D67ClbdkIfeeDBbY0GelZtC25ayLvVjr2q2cloHeQV1R0QAWqRQ==", + "version": "3.62.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.62.0.tgz", + "integrity": "sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -5158,23 +4667,6 @@ "node": ">= 0.4" } }, - "node_modules/object.assign": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", - "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", - "dependencies": { - "call-bind": "^1.0.5", - "define-properties": "^1.2.1", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -5199,17 +4691,17 @@ } }, "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "version": 
"0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -5392,14 +4884,6 @@ "tslib": "^2.1.0" } }, - "node_modules/possible-typed-array-names": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", - "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/postcss": { "version": "8.4.38", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", @@ -5641,9 +5125,9 @@ } }, "node_modules/react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "dependencies": { "loose-envify": "^1.1.0" }, @@ -5676,15 +5160,15 @@ } }, "node_modules/react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "dependencies": { "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" + "scheduler": "^0.23.2" }, "peerDependencies": { - "react": "^18.2.0" + "react": "^18.3.1" } }, "node_modules/react-immutable-proptypes": { @@ -5721,32 +5205,6 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, - "node_modules/react-redux": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.1.tgz", - "integrity": "sha512-5ynfGDzxxsoV73+4czQM56qF43vsmgJsO22rmAvU5tZT2z5Xow/A2uhhxwXuGTxgdReF3zcp7A80gma2onRs1A==", - "dependencies": { - "@types/use-sync-external-store": "^0.0.3", - "use-sync-external-store": "^1.0.0" - }, - "peerDependencies": { - "@types/react": "^18.2.25", - "react": "^18.0", - "react-native": ">=0.69", - "redux": "^5.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "react-native": { - "optional": true - }, - "redux": { - "optional": true - } - } - }, "node_modules/react-remove-scroll": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", @@ -5883,23 +5341,6 @@ "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, - "node_modules/regexp.prototype.flags": { - "version": "1.5.2", - "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", - "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", - "dependencies": { - "call-bind": "^1.0.6", - "define-properties": "^1.2.1", - "es-errors": "^1.3.0", - "set-function-name": "^2.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/remarkable": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/remarkable/-/remarkable-2.0.1.tgz", @@ -5984,9 +5425,9 @@ } }, "node_modules/rollup": { - "version": "4.13.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.13.2.tgz", - "integrity": "sha512-MIlLgsdMprDBXC+4hsPgzWUasLO9CE4zOkj/u6j+Z6j5A4zRY+CtiXAdJyPtgCsc42g658Aeh1DlrdVEJhsL2g==", + "version": "4.17.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.17.2.tgz", + "integrity": "sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==", "dev": true, "dependencies": { "@types/estree": "1.0.5" @@ -5999,21 +5440,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.13.2", - "@rollup/rollup-android-arm64": "4.13.2", - "@rollup/rollup-darwin-arm64": "4.13.2", - "@rollup/rollup-darwin-x64": "4.13.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.13.2", - "@rollup/rollup-linux-arm64-gnu": "4.13.2", - "@rollup/rollup-linux-arm64-musl": "4.13.2", - "@rollup/rollup-linux-powerpc64le-gnu": "4.13.2", - "@rollup/rollup-linux-riscv64-gnu": "4.13.2", - "@rollup/rollup-linux-s390x-gnu": "4.13.2", - "@rollup/rollup-linux-x64-gnu": "4.13.2", - "@rollup/rollup-linux-x64-musl": "4.13.2", - "@rollup/rollup-win32-arm64-msvc": "4.13.2", - "@rollup/rollup-win32-ia32-msvc": "4.13.2", - "@rollup/rollup-win32-x64-msvc": "4.13.2", + "@rollup/rollup-android-arm-eabi": "4.17.2", + "@rollup/rollup-android-arm64": "4.17.2", + "@rollup/rollup-darwin-arm64": "4.17.2", + "@rollup/rollup-darwin-x64": "4.17.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.17.2", + "@rollup/rollup-linux-arm-musleabihf": "4.17.2", + "@rollup/rollup-linux-arm64-gnu": "4.17.2", + "@rollup/rollup-linux-arm64-musl": "4.17.2", + "@rollup/rollup-linux-powerpc64le-gnu": "4.17.2", + "@rollup/rollup-linux-riscv64-gnu": "4.17.2", + "@rollup/rollup-linux-s390x-gnu": "4.17.2", + "@rollup/rollup-linux-x64-gnu": "4.17.2", + "@rollup/rollup-linux-x64-musl": "4.17.2", + "@rollup/rollup-win32-arm64-msvc": "4.17.2", + "@rollup/rollup-win32-ia32-msvc": "4.17.2", + "@rollup/rollup-win32-x64-msvc": "4.17.2", "fsevents": "~2.3.2" } }, @@ -6040,23 +5482,6 @@ "queue-microtask": "^1.2.2" } }, - "node_modules/safe-array-concat": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", - "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", - "dependencies": { - "call-bind": "^1.0.7", - "get-intrinsic": "^1.2.4", - "has-symbols": "^1.0.3", - "isarray": "^2.0.5" - }, - "engines": { - "node": ">=0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -6076,26 +5501,10 @@ } ] }, - "node_modules/safe-regex-test": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", - "integrity": 
"sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", - "dependencies": { - "call-bind": "^1.0.6", - "es-errors": "^1.3.0", - "is-regex": "^1.1.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", "dependencies": { "loose-envify": "^1.1.0" } @@ -6129,29 +5538,15 @@ } }, "node_modules/set-function-length": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", - "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dependencies": { - "define-data-property": "^1.1.2", + "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/set-function-name": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", - "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "functions-have-names": "^1.2.3", "has-property-descriptors": "^1.0.2" }, "engines": { @@ -6207,9 +5602,9 @@ } }, "node_modules/short-unique-id": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/short-unique-id/-/short-unique-id-5.0.3.tgz", - "integrity": "sha512-yhniEILouC0s4lpH0h7rJsfylZdca10W9mDJRAFh3EpcSUanCHGb0R7kcFOIUCZYSAPo0PUD5ZxWQdW0T4xaug==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/short-unique-id/-/short-unique-id-5.1.1.tgz", + "integrity": "sha512-qqisAdcWLXSTNK2MKXI66ldHpTKWv+5c28TPG//8Tv9mwC2UL/J/w2EsJaPzVxVRTmoBc4KwGIuZiz58wButfA==", "bin": { "short-unique-id": "bin/short-unique-id", "suid": "bin/short-unique-id" @@ -6323,52 +5718,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/string.prototype.trim": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", - "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", - "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", - "dependencies": { 
- "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", - "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -6420,9 +5769,9 @@ } }, "node_modules/swagger-client": { - "version": "3.27.0", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.27.0.tgz", - "integrity": "sha512-DyuHrUzHxysUbhsCgPfsWKH3EULDwjD7jkRl0SHvXAd6gtZS3e3RHUZjDBA4cbrcgDiIuC7Ju/YIOkE+rKCutw==", + "version": "3.27.2", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.27.2.tgz", + "integrity": "sha512-7dVtvyCXmpHXmv5xgS5DyAyxN17l75qmxN8BCNb/z3sj+kYDsxwJeJP3X6enPyxtZsMZFDMxC+EtiFbml7pS6Q==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", "@swagger-api/apidom-core": ">=0.99.1 <1.0.0", @@ -6438,7 +5787,7 @@ "node-abort-controller": "^3.1.1", "node-fetch-commonjs": "^3.3.2", "qs": "^6.10.2", - "traverse": "~0.6.6" + "traverse": "=0.6.8" } }, "node_modules/swagger-client/node_modules/is-plain-object": { @@ -6450,9 +5799,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.17.0", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.17.0.tgz", - "integrity": "sha512-z9IwISLWdlrWfLT9p0eTOoW1eDHEu/InEzZhKxZA+ylUbbNHLIdMwKHaalbnv2dhjtsLs7Nf87hHJs2wKtn2iA==", + "version": "5.17.2", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.17.2.tgz", + "integrity": "sha512-jwhKQ0IdM1t77clbJ9EorL7+6B5Sr1mG+ryqSELxT5MaG4y3yOIyFbZ0Xn/EnSyRuww/V8FTK/0KIX3gf41taw==", "dependencies": { "@babel/runtime-corejs3": "^7.24.4", "@braintree/sanitize-url": "=7.0.1", @@ -6483,7 +5832,7 @@ "reselect": "^5.1.0", "serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.27.0", + "swagger-client": "^3.27.2", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -6494,6 +5843,32 @@ "react-dom": ">=16.8.0 <19" } }, + "node_modules/swagger-ui-react/node_modules/react-redux": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.1.tgz", + "integrity": "sha512-5ynfGDzxxsoV73+4czQM56qF43vsmgJsO22rmAvU5tZT2z5Xow/A2uhhxwXuGTxgdReF3zcp7A80gma2onRs1A==", + "dependencies": { + "@types/use-sync-external-store": "^0.0.3", + "use-sync-external-store": "^1.0.0" + }, + "peerDependencies": { + "@types/react": "^18.2.25", + "react": "^18.0", + "react-native": ">=0.69", + "redux": "^5.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react-native": { + "optional": true + }, + "redux": { + "optional": true + } + } + }, "node_modules/tar-fs": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", @@ -6556,14 +5931,9 @@ "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" }, "node_modules/traverse": { - "version": "0.6.9", - "resolved": 
"https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", - "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", - "dependencies": { - "gopd": "^1.0.1", - "typedarray.prototype.slice": "^1.0.3", - "which-typed-array": "^1.1.15" - }, + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.8.tgz", + "integrity": "sha512-aXJDbk6SnumuaZSANd21XAo15ucCDE38H4fkqiGsc3MhCK+wOlZvLP9cB/TvpHT0mOyWgC4Z8EwRlzqYSUzdsA==", "engines": { "node": ">= 0.4" }, @@ -6664,94 +6034,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/typed-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", - "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", - "dependencies": { - "call-bind": "^1.0.7", - "es-errors": "^1.3.0", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/typed-array-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", - "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", - "dependencies": { - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-byte-offset": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", - "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", - "dependencies": { - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-length": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", - "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", - "dependencies": { - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typedarray.prototype.slice": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", - "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-errors": "^1.3.0", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-offset": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/types-ramda": { "version": "0.29.10", "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", @@ -6778,20 +6060,6 @@ "resolved": 
"https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", @@ -6865,9 +6133,9 @@ } }, "node_modules/use-sync-external-store": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", - "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.2.tgz", + "integrity": "sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } @@ -6879,9 +6147,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.2.8", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.8.tgz", - "integrity": "sha512-OyZR+c1CE8yeHw5V5t59aXsUPPVTHMDjEZz8MgguLL/Q7NblxhZUlTu9xSPqlsUO/y+X7dlU05jdhvyycD55DA==", + "version": "5.2.10", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.10.tgz", + "integrity": "sha512-PAzgUZbP7msvQvqdSD+ErD5qGnSFiGOoWmV5yAKUEI0kdhjbH6nMWVyZQC/hSc4aXwc0oJ9aEdIiF9Oje0JFCw==", "dev": true, "dependencies": { "esbuild": "^0.20.1", @@ -6972,37 +6240,13 @@ "node": ">= 8" } }, - "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", - "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", - "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", - "dependencies": { - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.2" - }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=0.10.0" } }, "node_modules/wrappy": { @@ -7037,9 +6281,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yaml": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", - 
"integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz", + "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==", "bin": { "yaml": "bin.mjs" }, diff --git a/playground/package.json b/playground/package.json index 2276fc2d31..845369d2c6 100644 --- a/playground/package.json +++ b/playground/package.json @@ -12,21 +12,21 @@ "dependencies": { "graphiql": "^3.2.0", "graphql": "^16.8.1", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "swagger-ui-react": "^5.17.0" + "react": "^18.3.1", + "react-dom": "^18.3.1", + "swagger-ui-react": "^5.17.2" }, "devDependencies": { - "@types/react": "^18.2.79", - "@types/react-dom": "^18.2.25", + "@types/react": "^18.3.1", + "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^7.7.1", "@typescript-eslint/parser": "^7.7.1", "@vitejs/plugin-react-swc": "^3.6.0", "eslint": "^8.57.0", - "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.6", "typescript": "^5.4.5", - "vite": "^5.2.8" + "vite": "^5.2.10" } } From 4248007344eaf4483483b2c6296bf4eb02376435 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Wed, 1 May 2024 17:03:29 -0400 Subject: [PATCH 44/49] fix: Add check to filter result for logical ops (#2573) ## Relevant issue(s) Resolves #2571 ## Description This PR adds a check post doc filtering in the fetcher to see if we need to rely on the external conditions instead to ensure a valid filtering operation. This is specially important when filtering with logical operators `_and` and `_or`. --- planner/filter/complex.go | 9 +- planner/filter/complex_test.go | 2 +- planner/type_join.go | 2 +- .../query/one_to_one/with_filter_test.go | 127 ++++++++++++++++++ 4 files changed, 135 insertions(+), 5 deletions(-) diff --git a/planner/filter/complex.go b/planner/filter/complex.go index acc2de4883..96597279cf 100644 --- a/planner/filter/complex.go +++ b/planner/filter/complex.go @@ -17,7 +17,7 @@ import ( // IsComplex returns true if the provided filter is complex. // A filter is considered complex if it contains a relation -// object withing an _or or _not operator not necessarily being +// object withing an _or, _and or _not operator not necessarily being // its direct child. 
func IsComplex(filter *mapper.Filter) bool { if filter == nil { @@ -31,8 +31,11 @@ func isComplex(conditions any, seekRelation bool) bool { case map[connor.FilterKey]any: for k, v := range typedCond { if op, ok := k.(*mapper.Operator); ok { - if (op.Operation == request.FilterOpOr && len(v.([]any)) > 1) || - op.Operation == request.FilterOpNot { + switch op.Operation { + case request.FilterOpOr, request.FilterOpAnd, request.FilterOpNot: + if v, ok := v.([]any); ok && len(v) == 1 { + continue + } if isComplex(v, true) { return true } diff --git a/planner/filter/complex_test.go b/planner/filter/complex_test.go index f16055df74..b344b5c89c 100644 --- a/planner/filter/complex_test.go +++ b/planner/filter/complex_test.go @@ -109,7 +109,7 @@ func TestIsComplex(t *testing.T) { m("published", m("rating", m("_gt", 4.0))), ), ), - isComplex: false, + isComplex: true, }, { name: "relation inside _and and _or", diff --git a/planner/type_join.go b/planner/type_join.go index be1ebb628e..7a983e1b78 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -345,7 +345,7 @@ func prepareScanNodeFilterForTypeJoin( parent.filter.Conditions = filter.Merge( parent.filter.Conditions, scan.filter.Conditions) } - filter.RemoveField(scan.filter, subType.Field) + scan.filter = nil } else { var parentFilter *mapper.Filter scan.filter, parentFilter = filter.SplitByFields(scan.filter, subType.Field) diff --git a/tests/integration/query/one_to_one/with_filter_test.go b/tests/integration/query/one_to_one/with_filter_test.go index 9d00cdd416..ab13634667 100644 --- a/tests/integration/query/one_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_one/with_filter_test.go @@ -492,3 +492,130 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestQueryOneToOne_WithCompoundFiltersThatIncludesRelation_ShouldReturnResults(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _and filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-66ba0c48-4984-5b44-83dd-edb791a54b7d + Doc: `{ + "name": "Some Other Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-3bfe0092-e31f-5ebe-a3ba-fa18fac448a6 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-5dac8488-0f75-5ddf-b08b-804b3d33a239 + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-8b0c345b-dda7-573c-b5f1-5fa1d70593e1 + Doc: `{ + "name": "Some Other Writer", + "age": 30, + "verified": true, + "published_id": "bae-66ba0c48-4984-5b44-83dd-edb791a54b7d" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {rating: {_gt: 4.0}}, + {author: {age: {_eq: 30}}} + ]}) { + name + rating + } + }`, + Results: []map[string]any{ + { + "name": "Some Other Book", + "rating": 3.0, + }, + { + "name": "Painted House", + "rating": 4.9, + }, + }, + }, 
+		testUtils.Request{
+			Request: `query {
+				Book(filter: {_and: [
+					{rating: {_ge: 4.0}},
+					{author: {age: {_eq: 45}}}
+				]}) {
+					name
+					rating
+				}
+			}`,
+			Results: []map[string]any{
+				{
+					"name":   "Some Book",
+					"rating": 4.0,
+				},
+			},
+		},
+		testUtils.Request{
+			// This is the same as {_not: {_and: [{rating: {_ge: 4.0}}, {author: {age: {_eq: 45}}}]}}
+			Request: `query {
+				Book(filter: {_not: {
+					rating: {_ge: 4.0},
+					author: {age: {_eq: 45}}
+				}}) {
+					name
+					rating
+				}
+			}`,
+			Results: []map[string]any{
+				{
+					"name":   "Some Other Book",
+					"rating": 3.0,
+				},
+				{
+					"name":   "Painted House",
+					"rating": 4.9,
+				},
+			},
+		},
+	},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}

From 51ea2143a13a558ff61000181c2539eadc062ebb Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Thu, 2 May 2024 08:21:50 -0400
Subject: [PATCH 45/49] fix: Handle compound filters on related indexed fields (#2575)

## Relevant issue(s)

Resolves #2572

## Description

Handles compound filters targeting related indexed fields, as well as
one-many joins from the many side.

The invertibleTypeJoin issue may also affect non-indexed joins.

There is another issue in this space not solved by this PR:
https://github.com/sourcenetwork/defradb/issues/2574
---
 planner/filter/complex.go                     |   3 -
 planner/filter/complex_test.go                |   2 +-
 planner/planner.go                            |   2 +-
 planner/type_join.go                          |  33 +-
 ...uery_with_compound_filter_relation_test.go | 362 ++++++++++++++++++
 5 files changed, 394 insertions(+), 8 deletions(-)
 create mode 100644 tests/integration/index/query_with_compound_filter_relation_test.go

diff --git a/planner/filter/complex.go b/planner/filter/complex.go
index 96597279cf..ce72ead07e 100644
--- a/planner/filter/complex.go
+++ b/planner/filter/complex.go
@@ -33,9 +33,6 @@ func isComplex(conditions any, seekRelation bool) bool {
 		if op, ok := k.(*mapper.Operator); ok {
 			switch op.Operation {
 			case request.FilterOpOr, request.FilterOpAnd, request.FilterOpNot:
-				if v, ok := v.([]any); ok && len(v) == 1 {
-					continue
-				}
 				if isComplex(v, true) {
 					return true
 				}
diff --git a/planner/filter/complex_test.go b/planner/filter/complex_test.go
index b344b5c89c..208860501f 100644
--- a/planner/filter/complex_test.go
+++ b/planner/filter/complex_test.go
@@ -80,7 +80,7 @@ func TestIsComplex(t *testing.T) {
 			inputFilter: r("_or",
 				m("published", m("rating", m("_gt", 4.0))),
 			),
-			isComplex: false,
+			isComplex: true,
 		},
 		{
 			name: "relation inside _or",
diff --git a/planner/planner.go b/planner/planner.go
index e0f1e07613..2a181ce621 100644
--- a/planner/planner.go
+++ b/planner/planner.go
@@ -361,7 +361,7 @@ func (p *Planner) tryOptimizeJoinDirection(node *invertibleTypeJoin, parentPlan
 	desc := slct.collection.Description()
 	for subFieldName, subFieldInd := range filteredSubFields {
 		indexes := desc.GetIndexesOnField(subFieldName)
-		if len(indexes) > 0 {
+		if len(indexes) > 0 && !filter.IsComplex(parentPlan.selectNode.filter) {
 			subInd := node.documentMapping.FirstIndexOfName(node.subTypeName)
 			relatedField := mapper.Field{Name: node.subTypeName, Index: subInd}
 			fieldFilter := filter.UnwrapRelation(filter.CopyField(
diff --git a/planner/type_join.go b/planner/type_join.go
index 7a983e1b78..231451689c 100644
--- a/planner/type_join.go
+++ b/planner/type_join.go
@@ -473,7 +473,6 @@ func (dir *joinDirection) invert() {
 }
 
 type invertibleTypeJoin struct {
-	documentIterator
 	docMapper
 
 	root    planNode
@@ -487,6 +486,9 @@ type invertibleTypeJoin struct {
 	secondaryFieldIndex immutable.Option[int]
 	secondaryFetchLimit uint
 
+	// docsToYield contains documents read and ready to be yielded by this node.
+	docsToYield []core.Doc
+
 	dir joinDirection
 }
@@ -556,6 +558,17 @@ func (join *invertibleTypeJoin) processSecondResult(secondDocs []core.Doc) (any,
 }
 
 func (join *invertibleTypeJoin) Next() (bool, error) {
+	if len(join.docsToYield) > 0 {
+		// If there are one or more documents in the queue, drop the first one -
+		// it will have been yielded by the last `Next()` call.
+		join.docsToYield = join.docsToYield[1:]
+		if len(join.docsToYield) > 0 {
+			// If there are still documents in the queue, return true, yielding the
+			// next one in the queue.
+			return true, nil
+		}
+	}
+
 	hasFirstValue, err := join.dir.firstNode.Next()
 
 	if err != nil || !hasFirstValue {
@@ -577,7 +590,14 @@ func (join *invertibleTypeJoin) Next() (bool, error) {
 			return false, err
 		}
 		if join.dir.secondNode == join.root {
-			join.root.Value().Fields[join.subSelect.Index] = join.subType.Value()
+			if len(secondDocs) == 0 {
+				return false, nil
+			}
+			for i := range secondDocs {
+				secondDocs[i].Fields[join.subSelect.Index] = join.subType.Value()
+			}
+			join.docsToYield = append(join.docsToYield, secondDocs...)
+			return true, nil
 		} else {
 			secondResult, secondIDResult := join.processSecondResult(secondDocs)
 			join.dir.firstNode.Value().Fields[join.subSelect.Index] = secondResult
@@ -596,11 +616,18 @@ func (join *invertibleTypeJoin) Next() (bool, error) {
 		}
 	}
 
-	join.currentValue = join.root.Value()
+	join.docsToYield = append(join.docsToYield, join.root.Value())
 
 	return true, nil
 }
 
+func (join *invertibleTypeJoin) Value() core.Doc {
+	if len(join.docsToYield) == 0 {
+		return core.Doc{}
+	}
+	return join.docsToYield[0]
+}
+
 func (join *invertibleTypeJoin) invertJoinDirectionWithIndex(
 	fieldFilter *mapper.Filter,
 	index client.IndexDescription,
diff --git a/tests/integration/index/query_with_compound_filter_relation_test.go b/tests/integration/index/query_with_compound_filter_relation_test.go
new file mode 100644
index 0000000000..671a862601
--- /dev/null
+++ b/tests/integration/index/query_with_compound_filter_relation_test.go
@@ -0,0 +1,362 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestIndex_QueryWithIndexOnOneToManyRelationAndFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Test" } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Test" } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationNotFilter_NoData(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Test" } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationAndFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "LensVM" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _and: [ + { certificationBodyOrg: { name: { _eq: "Source Inc." } } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{ + { + "name": "DefraDB", + }, + { + "name": "LensVM", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." 
+ }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "LensVM" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _or: [ + { certificationBodyOrg: { name: { _eq: "Source Inc." } } }, + { name: { _eq: "Zanzi" } } + ] + } + ) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Zanzi", + }, + { + "name": "DefraDB", + }, + { + "name": "LensVM", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestIndex_QueryWithIndexOnOneToManyRelationNotFilter_Data(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Program { + name: String + certificationBodyOrg: Organization + } + + type Organization { + name: String @index + programs: [Program] + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Source Inc." + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-2b020aba-0681-5896-91d6-e3224938c32e", + "name": "DefraDB" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "ESA" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "certificationBodyOrg": "bae-5e7a0a2c-40a0-572c-93b6-79930cab3317", + "name": "Horizon" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Zanzi" + }`, + }, + testUtils.Request{ + Request: `query { + Program( + filter: { + _not: { + certificationBodyOrg: { name: { _eq: "Source Inc." } } + } + } + ) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Horizon", + }, + { + "name": "Zanzi", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From e7934b72db9293b5927264adbc90c735dc56a89a Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 2 May 2024 13:53:58 -0400 Subject: [PATCH 46/49] test(i): Fix dataless compound relational index filter tests (#2581) ## Relevant issue(s) Resolves #2580 ## Description Fix dataless compound relational index filter tests. 
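
As a reference for reviewers, here is a minimal sketch of the three
compound-operator shapes these dataless tests are meant to exercise (the
`Program`/`Organization` schema is the one defined in the tests; the
`datalessFilters` variable name is illustrative only). With no documents
created, each request should return an empty result set:

```go
package index

// Sketch only: _and and _or wrap a list of conditions, while _not wraps a
// single condition object containing the relation sub-filter.
var datalessFilters = []string{
	`query { Program(filter: {_and: [{certificationBodyOrg: {name: {_eq: "Test"}}}]}) { name } }`,
	`query { Program(filter: {_or: [{certificationBodyOrg: {name: {_eq: "Test"}}}]}) { name } }`,
	`query { Program(filter: {_not: {certificationBodyOrg: {name: {_eq: "Test"}}}}) { name } }`,
}
```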
--- .../index/query_with_compound_filter_relation_test.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/index/query_with_compound_filter_relation_test.go b/tests/integration/index/query_with_compound_filter_relation_test.go index 671a862601..ff503d6d38 100644 --- a/tests/integration/index/query_with_compound_filter_relation_test.go +++ b/tests/integration/index/query_with_compound_filter_relation_test.go @@ -70,7 +70,7 @@ func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_NoData(t *testing.T) { Request: `query { Program( filter: { - _and: [ + _or: [ { certificationBodyOrg: { name: { _eq: "Test" } } } ] } @@ -105,9 +105,9 @@ func TestIndex_QueryWithIndexOnOneToManyRelationNotFilter_NoData(t *testing.T) { Request: `query { Program( filter: { - _and: [ - { certificationBodyOrg: { name: { _eq: "Test" } } } - ] + _not: { + certificationBodyOrg: { name: { _eq: "Test" } } + } } ) { name From 43cbf4715f18576d4dd521e9fa627bd227ddde2f Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 3 May 2024 11:09:02 -0400 Subject: [PATCH 47/49] fix: Return correct results from one-many indexed filter (#2579) ## Relevant issue(s) Resolves #2574 ## Description Return correct results from one-many joins filtered on an indexed field on the many side. The invertibleTypeJoin doesn't support the inversion of one-many joins and was converting them into one-one joins. This means that the index is no longer used if targeting the many side: https://github.com/sourcenetwork/defradb/issues/2578 (this support can be added after the release). --- planner/type_join.go | 9 +- .../index/query_with_relation_filter_test.go | 255 ++++++++++++++---- 2 files changed, 217 insertions(+), 47 deletions(-) diff --git a/planner/type_join.go b/planner/type_join.go index 231451689c..f93a8fe7db 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -279,6 +279,7 @@ func (p *Planner) makeTypeJoinOne( root: source, subType: selectPlan, subSelect: subType, + subSelectFieldDef: subTypeFieldDesc, rootName: immutable.Some(subTypeField.Name), subTypeName: subType.Name, isSecondary: !subTypeFieldDesc.IsPrimaryRelation, @@ -409,6 +410,7 @@ func (p *Planner) makeTypeJoinMany( root: source, subType: selectPlan, subSelect: subType, + subSelectFieldDef: subTypeFieldDesc, rootName: rootName, isSecondary: true, subTypeName: subType.Name, @@ -480,7 +482,8 @@ type invertibleTypeJoin struct { rootName immutable.Option[string] subTypeName string - subSelect *mapper.Select + subSelect *mapper.Select + subSelectFieldDef client.FieldDefinition isSecondary bool secondaryFieldIndex immutable.Option[int] @@ -636,6 +639,10 @@ func (join *invertibleTypeJoin) invertJoinDirectionWithIndex( // If the root field has no value it cannot be inverted return nil } + if join.subSelectFieldDef.Kind.IsArray() { + // invertibleTypeJoin does not support inverting one-many relations atm + return nil + } subScan := getScanNode(join.subType) subScan.tryAddField(join.rootName.Value() + request.RelatedObjectID) subScan.filter = fieldFilter diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index db2f351ae7..8fb6500eef 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -53,14 +53,16 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte testUtils.Request{ Request: req1, Results: []map[string]any{ + {"name": "Keenan"}, {"name": 
"Islam"}, {"name": "Shahzad"}, - {"name": "Keenan"}, }, }, testUtils.Request{ - Request: makeExplainQuery(req1), - Asserter: testUtils.NewExplainAsserter().WithFieldFetches(6).WithIndexFetches(3), + Request: makeExplainQuery(req1), + // The invertable join does not support inverting one-many relations, so the index is + // not used. + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0), }, testUtils.Request{ Request: req2, @@ -69,8 +71,10 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte }, }, testUtils.Request{ - Request: makeExplainQuery(req2), - Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + Request: makeExplainQuery(req2), + // The invertable join does not support inverting one-many relations, so the index is + // not used. + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0), }, }, } @@ -115,14 +119,16 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte testUtils.Request{ Request: req1, Results: []map[string]any{ + {"name": "Keenan"}, {"name": "Islam"}, {"name": "Shahzad"}, - {"name": "Keenan"}, }, }, testUtils.Request{ - Request: makeExplainQuery(req1), - Asserter: testUtils.NewExplainAsserter().WithFieldFetches(6).WithIndexFetches(3), + Request: makeExplainQuery(req1), + // The invertable join does not support inverting one-many relations, so the index is + // not used. + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0), }, testUtils.Request{ Request: req2, @@ -131,8 +137,10 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte }, }, testUtils.Request{ - Request: makeExplainQuery(req2), - Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + Request: makeExplainQuery(req2), + // The invertable join does not support inverting one-many relations, so the index is + // not used. 
+ Asserter: testUtils.NewExplainAsserter().WithFieldFetches(450).WithIndexFetches(0), }, }, } @@ -309,54 +317,224 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedRelationWhileI testUtils.ExecuteTestCase(t, test) } -func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) { - req1 := `query { - User(filter: { - address: {city: {_eq: "Munich"}} - }) { - name - address { - city - } - } - }` - req2 := `query { +func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Filter on indexed relation field in 1-N relations", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + devices: [Device] + } + + type Device { + model: String @index + manufacturer: String + owner: User + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Chris" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Addo" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Walkman", + "manufacturer": "Sony", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Walkman", + "manufacturer": "The Proclaimers", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Running Man", + "manufacturer": "Braveworld Productions", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: { + devices: {model: {_eq: "Walkman"}} + }) { + name + devices { + model + manufacturer + } + } + }`, + Results: []map[string]any{ + { + "name": "Chris", + "devices": []map[string]any{ + { + "model": "Walkman", + "manufacturer": "Sony", + }, + { + "model": "Walkman", + "manufacturer": "The Proclaimers", + }, + // The filter is on User, so all devices belonging to it will be returned + { + "model": "Running Man", + "manufacturer": "Braveworld Productions", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilterWithExplain(t *testing.T) { + req := `query { User(filter: { devices: {model: {_eq: "Walkman"}} }) { name devices { model + manufacturer } } }` test := testUtils.TestCase{ - Description: "Filter on indexed relation field in 1-1 and 1-N relations", + Description: "Filter on indexed relation field in 1-N relations", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` type User { - name: String - age: Int - address: Address - devices: [Device] + name: String + devices: [Device] } type Device { model: String @index + manufacturer: String owner: User - } + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Chris" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Walkman", + "manufacturer": "Sony", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Walkman", + "manufacturer": "The Proclaimers", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "model": "Running Man", + "manufacturer": "Braveworld Productions", + "owner": "bae-403d7337-f73e-5c81-8719-e853938c8985" + }`, + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + { + "name": "Chris", + "devices": []map[string]any{ + { + "model": "Walkman", + 
"manufacturer": "Sony", + }, + { + "model": "Walkman", + "manufacturer": "The Proclaimers", + }, + { + "model": "Running Man", + "manufacturer": "Braveworld Productions", + }, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + // The invertable join does not support inverting one-many relations, so the index is + // not used. + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(10).WithIndexFetches(0), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithIndexOnOneToOne_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) { + req := `query { + User(filter: { + address: {city: {_eq: "Munich"}} + }) { + name + address { + city + } + } + }` + test := testUtils.TestCase{ + Description: "Filter on indexed relation field in 1-1 relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + address: Address + } type Address { user: User @primary city: String @index - }`, + } + `, }, testUtils.CreatePredefinedDocs{ Docs: getUserDocs(), }, testUtils.Request{ - Request: req1, + Request: req, Results: []map[string]any{ { "name": "Islam", @@ -367,22 +545,7 @@ func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter }, }, testUtils.Request{ - Request: makeExplainQuery(req1), - Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), - }, - testUtils.Request{ - Request: req2, - Results: []map[string]any{ - { - "name": "Chris", - "devices": map[string]any{ - "model": "Walkman", - }, - }, - }, - }, - testUtils.Request{ - Request: makeExplainQuery(req2), + Request: makeExplainQuery(req), Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), }, }, From e4c59a965b9b864dd3759fe75836a8b6896d9688 Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Fri, 3 May 2024 22:41:57 +0200 Subject: [PATCH 48/49] test: Add more ACP integration tests (#2583) Resolves #2474 and #2475 Add integration tests for ACP on relation objects and `_avg` and `_count` methods. --- tests/integration/acp/query/avg_test.go | 98 +++++++ tests/integration/acp/query/count_test.go | 183 +++++++++++++ tests/integration/acp/query/fixture.go | 148 +++++++++++ .../acp/query/relation_objects_test.go | 242 ++++++++++++++++++ 4 files changed, 671 insertions(+) create mode 100644 tests/integration/acp/query/avg_test.go create mode 100644 tests/integration/acp/query/count_test.go create mode 100644 tests/integration/acp/query/fixture.go create mode 100644 tests/integration/acp/query/relation_objects_test.go diff --git a/tests/integration/acp/query/avg_test.go b/tests/integration/acp/query/avg_test.go new file mode 100644 index 0000000000..cd540f83ad --- /dev/null +++ b/tests/integration/acp/query/avg_test.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_acp
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+func TestACP_QueryAverageWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 2 public employees, 1 with salary 10k, 1 with salary 20k
+						"_avg": int(15000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryAverageWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 4 employees with salaries 10k, 20k, 30k, 40k
+						"_avg": int(25000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryAverageWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query average with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						_avg(Employee: {field: salary})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 2 public employees, 1 with salary 10k, 1 with salary 20k
+						"_avg": int(15000),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/query/count_test.go b/tests/integration/acp/query/count_test.go
new file mode 100644
index 0000000000..74c4025c22
--- /dev/null
+++ b/tests/integration/acp/query/count_test.go
@@ -0,0 +1,183 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp"
+)
+
+func TestACP_QueryCountDocumentsWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithoutIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects without identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 1 of 2 companies is public and has 1 public employee out of 2
+						"_count": int(1),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountDocumentsWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(4),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects with identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor1Identity,
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountDocumentsWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query documents' count with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						_count(Employee: {})
+					}
+				`,
+				Results: []map[string]any{
+					{
+						"_count": int(2),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_QueryCountRelatedObjectsWithWrongIdentity(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Test acp, query count of related objects with wrong identity",
+
+		Actions: []any{
+			getSetupEmployeeCompanyActions(),
+
+			testUtils.Request{
+				Identity: acpUtils.Actor2Identity,
+				Request: `
+					query {
+						Company {
+							_count(employees: {})
+						}
+					}
+				`,
+				Results: []map[string]any{
+					{
+						// 1 of 2 companies is public and has 1 public employee out of 2
+						"_count": int(1),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/query/fixture.go b/tests/integration/acp/query/fixture.go
new file mode 100644
index 0000000000..ed81ed0633
--- /dev/null
+++ b/tests/integration/acp/query/fixture.go
@@ -0,0 +1,148 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp + +import ( + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +const employeeCompanyPolicy = ` +description: A Valid DefraDB Policy Interface (DPI) + +actor: + name: actor + +resources: + employees: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + + companies: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor +` + +func getSetupEmployeeCompanyActions() []any { + return []any{ + testUtils.AddPolicy{ + Identity: acpUtils.Actor1Identity, + Policy: employeeCompanyPolicy, + ExpectedPolicyID: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + }, + + testUtils.SchemaUpdate{ + Schema: ` + type Employee @policy( + id: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + resource: "employees" + ) { + name: String + salary: Int + company: Company + } + + type Company @policy( + id: "67607eb2a2a873f4a69eb6876323cee7601d8a4d4fedcc18154aaee65cf38e7f", + resource: "companies" + ) { + name: String + capital: Int + employees: [Employee] + } + `, + }, + + testUtils.CreateDoc{ + CollectionID: 1, + Doc: ` + { + "name": "Public Company", + "capital": 100000 + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "Private Company", + "capital": 200000 + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "PubEmp in PubCompany", + "salary": 10000, + "company": "bae-1ab7ac86-3c68-5abb-b526-803858c9dccf" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "PubEmp in PrivateCompany", + "salary": 20000, + "company": "bae-4aef4bd6-e2ee-5075-85a5-4d64bbf80bca" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "PrivateEmp in PubCompany", + "salary": 30000, + "company": "bae-1ab7ac86-3c68-5abb-b526-803858c9dccf" + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Identity: acpUtils.Actor1Identity, + Doc: ` + { + "name": "PrivateEmp in PrivateCompany", + "salary": 40000, + "company": "bae-4aef4bd6-e2ee-5075-85a5-4d64bbf80bca" + } + `, + }, + } +} diff --git a/tests/integration/acp/query/relation_objects_test.go b/tests/integration/acp/query/relation_objects_test.go new file mode 100644 index 0000000000..76bd264ac8 --- /dev/null +++ b/tests/integration/acp/query/relation_objects_test.go @@ -0,0 +1,242 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + acpUtils "github.com/sourcenetwork/defradb/tests/integration/acp" +) + +func TestACP_QueryManyToOneRelationObjectsWithoutIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies without identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PubEmp in PrivateCompany", + "company": nil, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithoutIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees without identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PubEmp in PubCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryManyToOneRelationObjectsWithIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies with identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor1Identity, + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PrivateEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + { + "name": "PrivateEmp in PrivateCompany", + "company": map[string]any{"name": "Private Company"}, + }, + { + "name": "PubEmp in PrivateCompany", + "company": map[string]any{"name": "Private Company"}, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees with identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor1Identity, + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PrivateEmp in PubCompany"}, + {"name": "PubEmp in PubCompany"}, + }, + }, + { + "name": "Private Company", + "employees": []map[string]any{ + {"name": "PrivateEmp in PrivateCompany"}, + {"name": "PubEmp in PrivateCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryManyToOneRelationObjectsWithWrongIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query employees with their companies with wrong identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor2Identity, + Request: ` + query { + Employee { + name + company { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "PubEmp in PrivateCompany", + "company": nil, + }, + { + "name": "PubEmp in PubCompany", + "company": map[string]any{"name": "Public 
Company"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_QueryOneToManyRelationObjectsWithWrongIdentity(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test acp, query companies with their employees with wrong identity", + + Actions: []any{ + getSetupEmployeeCompanyActions(), + + testUtils.Request{ + Identity: acpUtils.Actor2Identity, + Request: ` + query { + Company { + name + employees { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Public Company", + "employees": []map[string]any{ + {"name": "PubEmp in PubCompany"}, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 33d36c7a67f6635d9cc242a146958e5726c2178c Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Fri, 3 May 2024 20:07:13 -0400 Subject: [PATCH 49/49] Release v0.11.0 --- CHANGELOG.md | 67 ++++ README.md | 5 +- client/mocks/collection.go | 333 ------------------ client/mocks/db.go | 29 +- db/fetcher/mocks/fetcher.go | 16 +- docs/cli/defradb.md | 1 + docs/cli/defradb_client.md | 1 + docs/cli/defradb_client_acp.md | 1 + docs/cli/defradb_client_acp_policy.md | 1 + docs/cli/defradb_client_acp_policy_add.md | 1 + docs/cli/defradb_client_backup.md | 1 + docs/cli/defradb_client_backup_export.md | 1 + docs/cli/defradb_client_backup_import.md | 1 + docs/cli/defradb_client_collection.md | 1 + docs/cli/defradb_client_collection_create.md | 1 + docs/cli/defradb_client_collection_delete.md | 1 + .../cli/defradb_client_collection_describe.md | 1 + docs/cli/defradb_client_collection_docIDs.md | 1 + docs/cli/defradb_client_collection_get.md | 1 + docs/cli/defradb_client_collection_patch.md | 1 + docs/cli/defradb_client_collection_update.md | 1 + docs/cli/defradb_client_dump.md | 1 + docs/cli/defradb_client_index.md | 1 + docs/cli/defradb_client_index_create.md | 1 + docs/cli/defradb_client_index_drop.md | 1 + docs/cli/defradb_client_index_list.md | 1 + docs/cli/defradb_client_p2p.md | 1 + docs/cli/defradb_client_p2p_collection.md | 1 + docs/cli/defradb_client_p2p_collection_add.md | 1 + .../defradb_client_p2p_collection_getall.md | 1 + .../defradb_client_p2p_collection_remove.md | 1 + docs/cli/defradb_client_p2p_info.md | 1 + docs/cli/defradb_client_p2p_replicator.md | 1 + .../defradb_client_p2p_replicator_delete.md | 1 + .../defradb_client_p2p_replicator_getall.md | 1 + docs/cli/defradb_client_p2p_replicator_set.md | 1 + docs/cli/defradb_client_query.md | 1 + docs/cli/defradb_client_schema.md | 1 + docs/cli/defradb_client_schema_add.md | 1 + docs/cli/defradb_client_schema_describe.md | 1 + docs/cli/defradb_client_schema_migration.md | 1 + .../defradb_client_schema_migration_down.md | 1 + .../defradb_client_schema_migration_reload.md | 1 + ...db_client_schema_migration_set-registry.md | 1 + .../defradb_client_schema_migration_set.md | 1 + .../cli/defradb_client_schema_migration_up.md | 1 + docs/cli/defradb_client_schema_patch.md | 1 + docs/cli/defradb_client_schema_set-active.md | 1 + docs/cli/defradb_client_tx.md | 1 + docs/cli/defradb_client_tx_commit.md | 1 + docs/cli/defradb_client_tx_create.md | 1 + docs/cli/defradb_client_tx_discard.md | 1 + docs/cli/defradb_client_view.md | 1 + docs/cli/defradb_client_view_add.md | 1 + docs/cli/defradb_server-dump.md | 1 + docs/cli/defradb_start.md | 1 + docs/cli/defradb_version.md | 1 + licenses/BSL.txt | 4 +- 58 files changed, 146 insertions(+), 360 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 342cfb3a53..7345a58cc8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,70 @@ + + +## 
[v0.11.0](https://github.com/sourcenetwork/defradb/compare/v0.10.0...v0.11.0) + +> 2024-05-03 + +DefraDB v0.11 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. + +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.10.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. + +### Features + +* Update corelog to 0.0.7 ([#2547](https://github.com/sourcenetwork/defradb/issues/2547)) +* Move relation field properties onto collection ([#2529](https://github.com/sourcenetwork/defradb/issues/2529)) +* Lens runtime config ([#2497](https://github.com/sourcenetwork/defradb/issues/2497)) +* Add P Counter CRDT ([#2482](https://github.com/sourcenetwork/defradb/issues/2482)) +* Add Access Control Policy ([#2338](https://github.com/sourcenetwork/defradb/issues/2338)) +* Force explicit primary decl. in SDL for one-ones ([#2462](https://github.com/sourcenetwork/defradb/issues/2462)) +* Allow mutation of col sources via PatchCollection ([#2424](https://github.com/sourcenetwork/defradb/issues/2424)) +* Add Defra-Lens support for branching schema ([#2421](https://github.com/sourcenetwork/defradb/issues/2421)) +* Add PatchCollection ([#2402](https://github.com/sourcenetwork/defradb/issues/2402)) + +### Fixes + +* Return correct results from one-many indexed filter ([#2579](https://github.com/sourcenetwork/defradb/issues/2579)) +* Handle compound filters on related indexed fields ([#2575](https://github.com/sourcenetwork/defradb/issues/2575)) +* Add check to filter result for logical ops ([#2573](https://github.com/sourcenetwork/defradb/issues/2573)) +* Make all array kinds nillable ([#2534](https://github.com/sourcenetwork/defradb/issues/2534)) +* Allow update when updating non-indexed field ([#2511](https://github.com/sourcenetwork/defradb/issues/2511)) + +### Documentation + +* Add data definition document ([#2544](https://github.com/sourcenetwork/defradb/issues/2544)) + +### Refactoring + +* Merge collection UpdateWith and DeleteWith ([#2531](https://github.com/sourcenetwork/defradb/issues/2531)) +* DB transactions context ([#2513](https://github.com/sourcenetwork/defradb/issues/2513)) +* Add NormalValue ([#2404](https://github.com/sourcenetwork/defradb/issues/2404)) +* Clean up client/request package ([#2443](https://github.com/sourcenetwork/defradb/issues/2443)) +* Rewrite convertImmutable ([#2445](https://github.com/sourcenetwork/defradb/issues/2445)) +* Unify Field Kind and Schema properties ([#2414](https://github.com/sourcenetwork/defradb/issues/2414)) +* Replace logging package with corelog ([#2406](https://github.com/sourcenetwork/defradb/issues/2406)) + +### Testing + +* Add flag to skip network tests ([#2495](https://github.com/sourcenetwork/defradb/issues/2495)) + +### Bot + +* Update dependencies (bulk dependabot PRs) 30-04-2024 ([#2570](https://github.com/sourcenetwork/defradb/issues/2570)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 7.7.0 to 7.7.1 in /playground ([#2550](https://github.com/sourcenetwork/defradb/issues/2550)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 7.7.0 to 7.7.1 in /playground 
([#2551](https://github.com/sourcenetwork/defradb/issues/2551)) +* Bump swagger-ui-react from 5.16.2 to 5.17.0 in /playground ([#2549](https://github.com/sourcenetwork/defradb/issues/2549)) +* Update dependencies (bulk dependabot PRs) 23-04-2023 ([#2548](https://github.com/sourcenetwork/defradb/issues/2548)) +* Bump go.opentelemetry.io/otel/sdk/metric from 1.24.0 to 1.25.0 ([#2499](https://github.com/sourcenetwork/defradb/issues/2499)) +* Bump typescript from 5.4.3 to 5.4.5 in /playground ([#2515](https://github.com/sourcenetwork/defradb/issues/2515)) +* Bump swagger-ui-react from 5.14.0 to 5.15.0 in /playground ([#2514](https://github.com/sourcenetwork/defradb/issues/2514)) +* Update dependencies (bulk dependabot PRs) 2024-04-09 ([#2509](https://github.com/sourcenetwork/defradb/issues/2509)) +* Update dependencies (bulk dependabot PRs) 2024-04-03 ([#2492](https://github.com/sourcenetwork/defradb/issues/2492)) +* Update dependencies (bulk dependabot PRs) 03-04-2024 ([#2486](https://github.com/sourcenetwork/defradb/issues/2486)) +* Bump github.com/multiformats/go-multiaddr from 0.12.2 to 0.12.3 ([#2480](https://github.com/sourcenetwork/defradb/issues/2480)) +* Bump [@types](https://github.com/types)/react from 18.2.66 to 18.2.67 in /playground ([#2427](https://github.com/sourcenetwork/defradb/issues/2427)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 7.2.0 to 7.3.1 in /playground ([#2428](https://github.com/sourcenetwork/defradb/issues/2428)) +* Update dependencies (bulk dependabot PRs) 19-03-2024 ([#2426](https://github.com/sourcenetwork/defradb/issues/2426)) +* Update dependencies (bulk dependabot PRs) 03-11-2024 ([#2399](https://github.com/sourcenetwork/defradb/issues/2399)) + ## [v0.10.0](https://github.com/sourcenetwork/defradb/compare/v0.9.0...v0.10.0) diff --git a/README.md b/README.md index 4924170e79..220c48f842 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,6 @@ Read the documentation on [docs.source.network](https://docs.source.network/). ## Table of Contents -- [Early Access](#early-access) - [Install](#install) - [Start](#start) - [Configuration](#configuration) @@ -32,12 +31,14 @@ Read the documentation on [docs.source.network](https://docs.source.network/). - [Collection subscription example](#collection-subscription-example) - [Replicator example](#replicator-example) - [Securing the HTTP API with TLS](#securing-the-http-api-with-tls) +- [Access Control System](#access-control-system) - [Supporting CORS](#supporting-cors) - [Backing up and restoring](#backing-up-and-restoring) +- [Community](#community) - [Licensing](#licensing) - [Contributors](#contributors) -DISCLAIMER: At this early stage, DefraDB does not offer access control or data encryption, and the default configuration exposes the database to the network. The software is provided "as is" and is not guaranteed to be stable, secure, or error-free. We encourage you to experiment with DefraDB and provide feedback, but please do not use it for production purposes until it has been thoroughly tested and developed. +DISCLAIMER: At this early stage, DefraDB does not offer data encryption, and the default configuration exposes the database to the network. The software is provided "as is" and is not guaranteed to be stable, secure, or error-free. We encourage you to experiment with DefraDB and provide feedback, but please do not use it for production purposes until it has been thoroughly tested and developed. 
## Install diff --git a/client/mocks/collection.go b/client/mocks/collection.go index fac90fe1e4..7c227edd2b 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -344,171 +344,6 @@ func (_c *Collection_DeleteDocIndex_Call) RunAndReturn(run func(context.Context, return _c } -// DeleteWith provides a mock function with given fields: ctx, target -func (_m *Collection) DeleteWith(ctx context.Context, target interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, target) - - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, target) - } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, target) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, target) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_DeleteWith_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWith' -type Collection_DeleteWith_Call struct { - *mock.Call -} - -// DeleteWith is a helper method to define mock.On call -// - ctx context.Context -// - target interface{} -func (_e *Collection_Expecter) DeleteWith(ctx interface{}, target interface{}) *Collection_DeleteWith_Call { - return &Collection_DeleteWith_Call{Call: _e.mock.On("DeleteWith", ctx, target)} -} - -func (_c *Collection_DeleteWith_Call) Run(run func(ctx context.Context, target interface{})) *Collection_DeleteWith_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) - }) - return _c -} - -func (_c *Collection_DeleteWith_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWith_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWith_Call { - _c.Call.Return(run) - return _c -} - -// DeleteWithDocID provides a mock function with given fields: ctx, docID -func (_m *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(ctx, docID) - - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { - return rf(ctx, docID) - } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { - r0 = rf(ctx, docID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { - r1 = rf(ctx, docID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_DeleteWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocID' -type Collection_DeleteWithDocID_Call struct { - *mock.Call -} - -// DeleteWithDocID is a helper method to define mock.On call -// - ctx context.Context -// - docID client.DocID -func (_e *Collection_Expecter) DeleteWithDocID(ctx interface{}, docID interface{}) *Collection_DeleteWithDocID_Call { - return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", ctx, docID)} -} - -func (_c *Collection_DeleteWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID)) *Collection_DeleteWithDocID_Call { - 
_c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID)) - }) - return _c -} - -func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { - _c.Call.Return(run) - return _c -} - -// DeleteWithDocIDs provides a mock function with given fields: ctx, docIDs -func (_m *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { - ret := _m.Called(ctx, docIDs) - - var r0 *client.DeleteResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { - return rf(ctx, docIDs) - } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { - r0 = rf(ctx, docIDs) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.DeleteResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { - r1 = rf(ctx, docIDs) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_DeleteWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocIDs' -type Collection_DeleteWithDocIDs_Call struct { - *mock.Call -} - -// DeleteWithDocIDs is a helper method to define mock.On call -// - ctx context.Context -// - docIDs []client.DocID -func (_e *Collection_Expecter) DeleteWithDocIDs(ctx interface{}, docIDs interface{}) *Collection_DeleteWithDocIDs_Call { - return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", ctx, docIDs)} -} - -func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(ctx context.Context, docIDs []client.DocID)) *Collection_DeleteWithDocIDs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID)) - }) - return _c -} - -func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocIDs_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { - _c.Call.Return(run) - return _c -} - // DeleteWithFilter provides a mock function with given fields: ctx, filter func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { ret := _m.Called(ctx, filter) @@ -1159,174 +994,6 @@ func (_c *Collection_UpdateDocIndex_Call) RunAndReturn(run func(context.Context, return _c } -// UpdateWith provides a mock function with given fields: ctx, target, updater -func (_m *Collection) UpdateWith(ctx context.Context, target interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, target, updater) - - var r0 *client.UpdateResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, target, updater) - } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, target, updater) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.UpdateResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, target, updater) - } else { - r1 
= ret.Error(1) - } - - return r0, r1 -} - -// Collection_UpdateWith_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWith' -type Collection_UpdateWith_Call struct { - *mock.Call -} - -// UpdateWith is a helper method to define mock.On call -// - ctx context.Context -// - target interface{} -// - updater string -func (_e *Collection_Expecter) UpdateWith(ctx interface{}, target interface{}, updater interface{}) *Collection_UpdateWith_Call { - return &Collection_UpdateWith_Call{Call: _e.mock.On("UpdateWith", ctx, target, updater)} -} - -func (_c *Collection_UpdateWith_Call) Run(run func(ctx context.Context, target interface{}, updater string)) *Collection_UpdateWith_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) - }) - return _c -} - -func (_c *Collection_UpdateWith_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWith_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWith_Call { - _c.Call.Return(run) - return _c -} - -// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater -func (_m *Collection) UpdateWithDocID(ctx context.Context, docID client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, docID, updater) - - var r0 *client.UpdateResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, docID, updater) - } - if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, docID, updater) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.UpdateResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { - r1 = rf(ctx, docID, updater) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_UpdateWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocID' -type Collection_UpdateWithDocID_Call struct { - *mock.Call -} - -// UpdateWithDocID is a helper method to define mock.On call -// - ctx context.Context -// - docID client.DocID -// - updater string -func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { - return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} -} - -func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) - }) - return _c -} - -func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocID_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { - _c.Call.Return(run) - return _c -} - -// UpdateWithDocIDs provides a mock function with given fields: ctx, docIDs, updater -func (_m *Collection) UpdateWithDocIDs(ctx context.Context, docIDs []client.DocID, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, 
docIDs, updater) - - var r0 *client.UpdateResult - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { - return rf(ctx, docIDs, updater) - } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { - r0 = rf(ctx, docIDs, updater) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.UpdateResult) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []client.DocID, string) error); ok { - r1 = rf(ctx, docIDs, updater) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Collection_UpdateWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocIDs' -type Collection_UpdateWithDocIDs_Call struct { - *mock.Call -} - -// UpdateWithDocIDs is a helper method to define mock.On call -// - ctx context.Context -// - docIDs []client.DocID -// - updater string -func (_e *Collection_Expecter) UpdateWithDocIDs(ctx interface{}, docIDs interface{}, updater interface{}) *Collection_UpdateWithDocIDs_Call { - return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", ctx, docIDs, updater)} -} - -func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(ctx context.Context, docIDs []client.DocID, updater string)) *Collection_UpdateWithDocIDs_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) - }) - return _c -} - -func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocIDs_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { - _c.Call.Return(run) - return _c -} - // UpdateWithFilter provides a mock function with given fields: ctx, filter, updater func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { ret := _m.Called(ctx, filter, updater) diff --git a/client/mocks/db.go b/client/mocks/db.go index 31c44c1241..20b5988fe7 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -32,23 +32,23 @@ func (_m *DB) EXPECT() *DB_Expecter { return &DB_Expecter{mock: &_m.Mock} } -// AddPolicy provides a mock function with given fields: ctx, creatorID, policy -func (_m *DB) AddPolicy(ctx context.Context, creatorID string, policy string) (client.AddPolicyResult, error) { - ret := _m.Called(ctx, creatorID, policy) +// AddPolicy provides a mock function with given fields: ctx, policy +func (_m *DB) AddPolicy(ctx context.Context, policy string) (client.AddPolicyResult, error) { + ret := _m.Called(ctx, policy) var r0 client.AddPolicyResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string, string) (client.AddPolicyResult, error)); ok { - return rf(ctx, creatorID, policy) + if rf, ok := ret.Get(0).(func(context.Context, string) (client.AddPolicyResult, error)); ok { + return rf(ctx, policy) } - if rf, ok := ret.Get(0).(func(context.Context, string, string) client.AddPolicyResult); ok { - r0 = rf(ctx, creatorID, policy) + if rf, ok := ret.Get(0).(func(context.Context, string) client.AddPolicyResult); ok { + r0 = rf(ctx, policy) } else { r0 = ret.Get(0).(client.AddPolicyResult) } - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, creatorID, policy) + if rf, ok := 
ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, policy) } else { r1 = ret.Error(1) } @@ -63,15 +63,14 @@ type DB_AddPolicy_Call struct { // AddPolicy is a helper method to define mock.On call // - ctx context.Context -// - creatorID string // - policy string -func (_e *DB_Expecter) AddPolicy(ctx interface{}, creatorID interface{}, policy interface{}) *DB_AddPolicy_Call { - return &DB_AddPolicy_Call{Call: _e.mock.On("AddPolicy", ctx, creatorID, policy)} +func (_e *DB_Expecter) AddPolicy(ctx interface{}, policy interface{}) *DB_AddPolicy_Call { + return &DB_AddPolicy_Call{Call: _e.mock.On("AddPolicy", ctx, policy)} } -func (_c *DB_AddPolicy_Call) Run(run func(ctx context.Context, creatorID string, policy string)) *DB_AddPolicy_Call { +func (_c *DB_AddPolicy_Call) Run(run func(ctx context.Context, policy string)) *DB_AddPolicy_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string), args[2].(string)) + run(args[0].(context.Context), args[1].(string)) }) return _c } @@ -81,7 +80,7 @@ func (_c *DB_AddPolicy_Call) Return(_a0 client.AddPolicyResult, _a1 error) *DB_A return _c } -func (_c *DB_AddPolicy_Call) RunAndReturn(run func(context.Context, string, string) (client.AddPolicyResult, error)) *DB_AddPolicy_Call { +func (_c *DB_AddPolicy_Call) RunAndReturn(run func(context.Context, string) (client.AddPolicyResult, error)) *DB_AddPolicy_Call { _c.Call.Return(run) return _c } diff --git a/db/fetcher/mocks/fetcher.go b/db/fetcher/mocks/fetcher.go index 44421230a0..4f537aefea 100644 --- a/db/fetcher/mocks/fetcher.go +++ b/db/fetcher/mocks/fetcher.go @@ -138,13 +138,13 @@ func (_c *Fetcher_FetchNext_Call) RunAndReturn(run func(context.Context) (fetche return _c } -// Init provides a mock function with given fields: ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted -func (_m *Fetcher) Init(ctx context.Context, id immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { - ret := _m.Called(ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) +// Init provides a mock function with given fields: ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted +func (_m *Fetcher) Init(ctx context.Context, _a1 immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool) error { + ret := _m.Called(ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) var r0 error if rf, ok := ret.Get(0).(func(context.Context, immutable.Option[identity.Identity], datastore.Txn, immutable.Option[acp.ACP], client.Collection, []client.FieldDefinition, *mapper.Filter, *core.DocumentMapping, bool, bool) error); ok { - r0 = rf(ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) + r0 = rf(ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted) } else { r0 = ret.Error(0) } @@ -159,7 +159,7 @@ type Fetcher_Init_Call struct { // Init is a helper method to define mock.On call // - ctx context.Context -// - id immutable.Option[identity.Identity] +// - _a1 immutable.Option[identity.Identity] // - txn datastore.Txn // - _a3 immutable.Option[acp.ACP] // - col client.Collection @@ -168,11 +168,11 @@ type Fetcher_Init_Call struct 
{ // - docmapper *core.DocumentMapping // - reverse bool // - showDeleted bool -func (_e *Fetcher_Expecter) Init(ctx interface{}, id interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { - return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, id, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} +func (_e *Fetcher_Expecter) Init(ctx interface{}, _a1 interface{}, txn interface{}, _a3 interface{}, col interface{}, fields interface{}, filter interface{}, docmapper interface{}, reverse interface{}, showDeleted interface{}) *Fetcher_Init_Call { + return &Fetcher_Init_Call{Call: _e.mock.On("Init", ctx, _a1, txn, _a3, col, fields, filter, docmapper, reverse, showDeleted)} } -func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, id immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { +func (_c *Fetcher_Init_Call) Run(run func(ctx context.Context, _a1 immutable.Option[identity.Identity], txn datastore.Txn, _a3 immutable.Option[acp.ACP], col client.Collection, fields []client.FieldDefinition, filter *mapper.Filter, docmapper *core.DocumentMapping, reverse bool, showDeleted bool)) *Fetcher_Init_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(immutable.Option[identity.Identity]), args[2].(datastore.Txn), args[3].(immutable.Option[acp.ACP]), args[4].(client.Collection), args[5].([]client.FieldDefinition), args[6].(*mapper.Filter), args[7].(*core.DocumentMapping), args[8].(bool), args[9].(bool)) }) diff --git a/docs/cli/defradb.md b/docs/cli/defradb.md index 00b29d7392..602206e575 100644 --- a/docs/cli/defradb.md +++ b/docs/cli/defradb.md @@ -16,6 +16,7 @@ Start a DefraDB node, interact with a local or remote node, and much more. -h, --help help for defradb --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index e799d5b4f7..302e171dd3 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -21,6 +21,7 @@ Execute queries, add schema types, obtain node info, etc. --allowed-origins stringArray List of origins to allow for CORS requests --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_acp.md b/docs/cli/defradb_client_acp.md index 43d1cae8ac..d3f57ae230 100644 --- a/docs/cli/defradb_client_acp.md +++ b/docs/cli/defradb_client_acp.md @@ -23,6 +23,7 @@ Learn more about [ACP](/acp/README.md) -i, --identity string ACP Identity --log-format string Log format to use.
Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_acp_policy.md b/docs/cli/defradb_client_acp_policy.md index 54445062c1..2e659a0eb4 100644 --- a/docs/cli/defradb_client_acp_policy.md +++ b/docs/cli/defradb_client_acp_policy.md @@ -19,6 +19,7 @@ Interact with the acp policy features of a DefraDB instance -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_acp_policy_add.md b/docs/cli/defradb_client_acp_policy_add.md index 65f3aa68d8..f426909323 100644 --- a/docs/cli/defradb_client_acp_policy_add.md +++ b/docs/cli/defradb_client_acp_policy_add.md @@ -67,6 +67,7 @@ defradb client acp policy add [-i --identity] [policy] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_backup.md b/docs/cli/defradb_client_backup.md index 6fdd82b1cf..ffa879365c 100644 --- a/docs/cli/defradb_client_backup.md +++ b/docs/cli/defradb_client_backup.md @@ -20,6 +20,7 @@ Currently only supports JSON format. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_backup_export.md b/docs/cli/defradb_client_backup_export.md index e950883096..fc05e8ee14 100644 --- a/docs/cli/defradb_client_backup_export.md +++ b/docs/cli/defradb_client_backup_export.md @@ -34,6 +34,7 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] <output_path> [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_backup_import.md b/docs/cli/defradb_client_backup_import.md index 1865785bde..373f5be89c 100644 --- a/docs/cli/defradb_client_backup_import.md +++ b/docs/cli/defradb_client_backup_import.md @@ -26,6 +26,7 @@ defradb client backup import [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path.
Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index 94c5f5975a..59faa94f78 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -24,6 +24,7 @@ Create, read, update, and delete documents within a collection. --allowed-origins stringArray List of origins to allow for CORS requests --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_create.md b/docs/cli/defradb_client_collection_create.md index daaaa2902f..b565c2a547 100644 --- a/docs/cli/defradb_client_collection_create.md +++ b/docs/cli/defradb_client_collection_create.md @@ -41,6 +41,7 @@ defradb client collection create [-i --identity] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index 110654ff5d..2bca8d7d8a 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -36,6 +36,7 @@ defradb client collection delete [-i --identity] [--filter <filter> --docID <docID>] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_describe.md b/docs/cli/defradb_client_collection_describe.md index 184a0bd723..bea05a1321 100644 --- a/docs/cli/defradb_client_collection_describe.md +++ b/docs/cli/defradb_client_collection_describe.md @@ -40,6 +40,7 @@ defradb client collection describe [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_docIDs.md b/docs/cli/defradb_client_collection_docIDs.md index 1f9de21946..1cf1a8444a 100644 --- a/docs/cli/defradb_client_collection_docIDs.md +++ b/docs/cli/defradb_client_collection_docIDs.md @@ -31,6 +31,7 @@ defradb client collection docIDs [-i --identity] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout.
(default "stderr") --log-overrides string Logger config overrides. Format ,=,...;,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_get.md b/docs/cli/defradb_client_collection_get.md index 34724bea54..7b80a2a54b 100644 --- a/docs/cli/defradb_client_collection_get.md +++ b/docs/cli/defradb_client_collection_get.md @@ -32,6 +32,7 @@ defradb client collection get [-i --identity] [--show-deleted] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format ,=,...;,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_patch.md b/docs/cli/defradb_client_collection_patch.md index 25cff2d710..c8540aa397 100644 --- a/docs/cli/defradb_client_collection_patch.md +++ b/docs/cli/defradb_client_collection_patch.md @@ -38,6 +38,7 @@ defradb client collection patch [patch] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format ,=,...;,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index 19533da6b3..ab6b8999b0 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -43,6 +43,7 @@ defradb client collection update [-i --identity] [--filter --docID ,=,...;,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_dump.md b/docs/cli/defradb_client_dump.md index e488234c5f..a819df1514 100644 --- a/docs/cli/defradb_client_dump.md +++ b/docs/cli/defradb_client_dump.md @@ -19,6 +19,7 @@ defradb client dump [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format ,=,...;,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_index.md b/docs/cli/defradb_client_index.md index e4577d3c10..bb59a6373b 100644 --- a/docs/cli/defradb_client_index.md +++ b/docs/cli/defradb_client_index.md @@ -19,6 +19,7 @@ Manage (create, drop, or list) collection indexes on a DefraDB node. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format ,=,...;,... 
--log-source Include source location in logs diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index dffc32822c..8c365e348e 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -36,6 +36,7 @@ defradb client index create -c --collection <collection> --fields <fields> [-n --name <name>] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_index_drop.md b/docs/cli/defradb_client_index_drop.md index 7e91607c3a..03b206c6cb 100644 --- a/docs/cli/defradb_client_index_drop.md +++ b/docs/cli/defradb_client_index_drop.md @@ -28,6 +28,7 @@ defradb client index drop -c --collection <collection> -n --name <name> [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_index_list.md b/docs/cli/defradb_client_index_list.md index 563b622cde..3c776f73ac 100644 --- a/docs/cli/defradb_client_index_list.md +++ b/docs/cli/defradb_client_index_list.md @@ -30,6 +30,7 @@ defradb client index list [-c --collection <collection>] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p.md b/docs/cli/defradb_client_p2p.md index 5ac781aa5c..2506208717 100644 --- a/docs/cli/defradb_client_p2p.md +++ b/docs/cli/defradb_client_p2p.md @@ -19,6 +19,7 @@ Interact with the DefraDB P2P system -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_collection.md b/docs/cli/defradb_client_p2p_collection.md index f664c94414..a1de966445 100644 --- a/docs/cli/defradb_client_p2p_collection.md +++ b/docs/cli/defradb_client_p2p_collection.md @@ -20,6 +20,7 @@ The selected collections synchronize their events on the pubsub network. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use.
Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_collection_add.md b/docs/cli/defradb_client_p2p_collection_add.md index 5a45967831..01bc79ca0f 100644 --- a/docs/cli/defradb_client_p2p_collection_add.md +++ b/docs/cli/defradb_client_p2p_collection_add.md @@ -31,6 +31,7 @@ defradb client p2p collection add [collectionIDs] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_collection_getall.md b/docs/cli/defradb_client_p2p_collection_getall.md index 66b8f919c6..8d10944ad2 100644 --- a/docs/cli/defradb_client_p2p_collection_getall.md +++ b/docs/cli/defradb_client_p2p_collection_getall.md @@ -24,6 +24,7 @@ defradb client p2p collection getall [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_collection_remove.md b/docs/cli/defradb_client_p2p_collection_remove.md index e8f8320222..1cd6a14ee9 100644 --- a/docs/cli/defradb_client_p2p_collection_remove.md +++ b/docs/cli/defradb_client_p2p_collection_remove.md @@ -31,6 +31,7 @@ defradb client p2p collection remove [collectionIDs] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_info.md b/docs/cli/defradb_client_p2p_info.md index 7a84f1152f..385780ad3d 100644 --- a/docs/cli/defradb_client_p2p_info.md +++ b/docs/cli/defradb_client_p2p_info.md @@ -23,6 +23,7 @@ defradb client p2p info [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
--log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md index 3dbe42b935..b9d5b561c7 100644 --- a/docs/cli/defradb_client_p2p_replicator.md +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -20,6 +20,7 @@ A replicator replicates one or all collection(s) from one node to another. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md index 88c68fa6b4..93e5ff6d95 100644 --- a/docs/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -29,6 +29,7 @@ defradb client p2p replicator delete [-c, --collection] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md index 38c015f24d..cc9cc1ed63 100644 --- a/docs/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -28,6 +28,7 @@ defradb client p2p replicator getall [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md index 24e514250a..4fbc980a7c 100644 --- a/docs/cli/defradb_client_p2p_replicator_set.md +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -29,6 +29,7 @@ defradb client p2p replicator set [-c, --collection] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_query.md b/docs/cli/defradb_client_query.md index cd53fcaf48..493acca2d4 100644 --- a/docs/cli/defradb_client_query.md +++ b/docs/cli/defradb_client_query.md @@ -41,6 +41,7 @@ defradb client query [-i --identity] [request] [flags] -i, --identity string ACP Identity --log-format string Log format to use.
Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index 569d8e95f9..2e144a89e6 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -19,6 +19,7 @@ Make changes, updates, or look for existing schema types. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_add.md b/docs/cli/defradb_client_schema_add.md index e425110a1b..0ff3f683f4 100644 --- a/docs/cli/defradb_client_schema_add.md +++ b/docs/cli/defradb_client_schema_add.md @@ -40,6 +40,7 @@ defradb client schema add [schema] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_describe.md b/docs/cli/defradb_client_schema_describe.md index cf94bed188..0b28a1e64e 100644 --- a/docs/cli/defradb_client_schema_describe.md +++ b/docs/cli/defradb_client_schema_describe.md @@ -39,6 +39,7 @@ defradb client schema describe [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration.md b/docs/cli/defradb_client_schema_migration.md index 3f4640a672..c339763571 100644 --- a/docs/cli/defradb_client_schema_migration.md +++ b/docs/cli/defradb_client_schema_migration.md @@ -19,6 +19,7 @@ Make, set, or look for existing schema migrations on a DefraDB node. -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
--log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration_down.md b/docs/cli/defradb_client_schema_migration_down.md index 72cd07b96e..f741f5bec9 100644 --- a/docs/cli/defradb_client_schema_migration_down.md +++ b/docs/cli/defradb_client_schema_migration_down.md @@ -36,6 +36,7 @@ defradb client schema migration down --collection <collectionID> [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration_reload.md b/docs/cli/defradb_client_schema_migration_reload.md index 8e56cd1a0c..8a1d8480c0 100644 --- a/docs/cli/defradb_client_schema_migration_reload.md +++ b/docs/cli/defradb_client_schema_migration_reload.md @@ -23,6 +23,7 @@ defradb client schema migration reload [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration_set-registry.md b/docs/cli/defradb_client_schema_migration_set-registry.md index cf0b117b30..ebb4c625c7 100644 --- a/docs/cli/defradb_client_schema_migration_set-registry.md +++ b/docs/cli/defradb_client_schema_migration_set-registry.md @@ -29,6 +29,7 @@ defradb client schema migration set-registry [collectionID] [cfg] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration_set.md b/docs/cli/defradb_client_schema_migration_set.md index 135e39308b..8386fd8369 100644 --- a/docs/cli/defradb_client_schema_migration_set.md +++ b/docs/cli/defradb_client_schema_migration_set.md @@ -36,6 +36,7 @@ defradb client schema migration set [src] [dst] [cfg] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
--log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_migration_up.md b/docs/cli/defradb_client_schema_migration_up.md index 863d033f91..b55ace45ad 100644 --- a/docs/cli/defradb_client_schema_migration_up.md +++ b/docs/cli/defradb_client_schema_migration_up.md @@ -36,6 +36,7 @@ defradb client schema migration up --collection <collectionID> [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_patch.md b/docs/cli/defradb_client_schema_patch.md index 796604ff4b..7d16e632ae 100644 --- a/docs/cli/defradb_client_schema_patch.md +++ b/docs/cli/defradb_client_schema_patch.md @@ -39,6 +39,7 @@ defradb client schema patch [schema] [migration] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_schema_set-active.md b/docs/cli/defradb_client_schema_set-active.md index 909e54a925..7f7b4f4cd5 100644 --- a/docs/cli/defradb_client_schema_set-active.md +++ b/docs/cli/defradb_client_schema_set-active.md @@ -24,6 +24,7 @@ defradb client schema set-active [versionID] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_tx.md b/docs/cli/defradb_client_tx.md index f1e38f6b6c..67bf63e2df 100644 --- a/docs/cli/defradb_client_tx.md +++ b/docs/cli/defradb_client_tx.md @@ -19,6 +19,7 @@ Create, commit, and discard DefraDB transactions -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_tx_commit.md b/docs/cli/defradb_client_tx_commit.md index 536fdd1284..eba408dc57 100644 --- a/docs/cli/defradb_client_tx_commit.md +++ b/docs/cli/defradb_client_tx_commit.md @@ -23,6 +23,7 @@ defradb client tx commit [id] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use.
Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_tx_create.md b/docs/cli/defradb_client_tx_create.md index 7a986e416a..26668e6ad2 100644 --- a/docs/cli/defradb_client_tx_create.md +++ b/docs/cli/defradb_client_tx_create.md @@ -25,6 +25,7 @@ defradb client tx create [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_tx_discard.md b/docs/cli/defradb_client_tx_discard.md index fc0bb2a8db..3989bc4c05 100644 --- a/docs/cli/defradb_client_tx_discard.md +++ b/docs/cli/defradb_client_tx_discard.md @@ -23,6 +23,7 @@ defradb client tx discard [id] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_view.md b/docs/cli/defradb_client_view.md index 3c77088c29..09c5bab11b 100644 --- a/docs/cli/defradb_client_view.md +++ b/docs/cli/defradb_client_view.md @@ -19,6 +19,7 @@ Manage (add) views within a running DefraDB instance -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_client_view_add.md b/docs/cli/defradb_client_view_add.md index 1e7b94f2b8..b671d8290c 100644 --- a/docs/cli/defradb_client_view_add.md +++ b/docs/cli/defradb_client_view_add.md @@ -29,6 +29,7 @@ defradb client view add [query] [sdl] [transform] [flags] -i, --identity string ACP Identity --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_server-dump.md b/docs/cli/defradb_server-dump.md index ff58487c65..3651d32e9c 100644 --- a/docs/cli/defradb_server-dump.md +++ b/docs/cli/defradb_server-dump.md @@ -18,6 +18,7 @@ defradb server-dump [flags] --allowed-origins stringArray List of origins to allow for CORS requests --log-format string Log format to use.
Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_start.md b/docs/cli/defradb_start.md index 4a4edeaf48..e0f732cb04 100644 --- a/docs/cli/defradb_start.md +++ b/docs/cli/defradb_start.md @@ -22,6 +22,7 @@ defradb start [flags] --allowed-origins stringArray List of origins to allow for CORS requests --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/docs/cli/defradb_version.md b/docs/cli/defradb_version.md index 810d0dc477..b4693fddbf 100644 --- a/docs/cli/defradb_version.md +++ b/docs/cli/defradb_version.md @@ -20,6 +20,7 @@ defradb version [flags] --allowed-origins stringArray List of origins to allow for CORS requests --log-format string Log format to use. Options are text or json (default "text") --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-no-color Disable colored log output --log-output string Log output path. Options are stderr or stdout. (default "stderr") --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,... --log-source Include source location in logs diff --git a/licenses/BSL.txt b/licenses/BSL.txt index 38cf309ebc..64d1d657d6 100644 --- a/licenses/BSL.txt +++ b/licenses/BSL.txt @@ -7,7 +7,7 @@ Parameters Licensor: Democratized Data (D2) Foundation -Licensed Work: DefraDB v0.10.0 +Licensed Work: DefraDB v0.11.0 The Licensed Work is (c) 2023 D2 Foundation. @@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the -Change Date: 2028-03-08 +Change Date: 2028-05-03 Change License: Apache License, Version 2.0
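
The regenerated mocks earlier in this patch all follow mockery's expecter pattern, so call sites configure them through `EXPECT()` rather than raw `mock.On` strings. Below is a minimal test sketch of how the narrowed `AddPolicy(ctx, policy)` surface might be consumed after the `creatorID` parameter was dropped. It assumes the package exposes mockery's standard `NewDB` constructor and that `client.AddPolicyResult` carries a `PolicyID` field; the policy string and ID are illustrative values, not taken from this patch.

```go
package example_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/client/mocks"
)

func TestAddPolicyExpectation(t *testing.T) {
	// NewDB is mockery's generated constructor; it registers a cleanup hook
	// that asserts expectations when the test finishes (assumed available here).
	db := mocks.NewDB(t)

	// With the creatorID parameter removed, AddPolicy is matched on
	// (ctx, policy) only; both arguments are matched loosely here.
	db.EXPECT().
		AddPolicy(mock.Anything, mock.Anything).
		Return(client.AddPolicyResult{PolicyID: "example-policy-id"}, nil)

	res, err := db.AddPolicy(context.Background(), "an example policy document")
	require.NoError(t, err)
	require.Equal(t, "example-policy-id", res.PolicyID)
}
```

The same expecter-style setup applies to the surviving `Collection` methods such as `DeleteWithFilter` and `UpdateWithFilter`, now that the `DeleteWith*` and `UpdateWith*` docID variants have been removed from the generated mock.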