diff --git a/.github/workflows/combine-bot-prs.yml b/.github/workflows/combine-bot-prs.yml new file mode 100644 index 0000000000..59a7910ced --- /dev/null +++ b/.github/workflows/combine-bot-prs.yml @@ -0,0 +1,168 @@ +# Copyright 2023 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: Combine Bot PRs Workflow + +# Triggered manually with the following configuration options to combine dependabot PRs. + +on: + workflow_dispatch: + inputs: + + branchPrefix: + description: 'Branch prefix to find combinable PRs based on, eg: dependabot/npm_and_yarn/playground' + required: true + default: 'dependabot' + + mustBeGreen: + description: 'Only combine PRs that are green (status is success). 
Set to false if repo does not run checks' + type: boolean + required: true + default: true + + combineBranchName: + description: 'Name of the branch to combine PRs into' + required: true + default: 'combined-bot-prs-branch' + + ignoreLabel: + description: 'Exclude PRs with this label' + required: true + default: 'nocombine' + +jobs: + combine-bot-prs: + name: Combine bot prs job + + runs-on: ubuntu-latest + + steps: + - uses: actions/github-script@v6 + + id: create-combined-pr + + name: Create combined pr + + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const pulls = await github.paginate('GET /repos/:owner/:repo/pulls', { + owner: context.repo.owner, + repo: context.repo.repo + }); + let branchesAndPRStrings = []; + let baseBranch = null; + let baseBranchSHA = null; + for (const pull of pulls) { + const branch = pull['head']['ref']; + console.log('Pull for branch: ' + branch); + if (branch.startsWith('${{ github.event.inputs.branchPrefix }}')) { + console.log('Branch matched prefix: ' + branch); + let statusOK = true; + if(${{ github.event.inputs.mustBeGreen }}) { + console.log('Checking green status: ' + branch); + const stateQuery = `query($owner: String!, $repo: String!, $pull_number: Int!) 
{ + repository(owner: $owner, name: $repo) { + pullRequest(number:$pull_number) { + commits(last: 1) { + nodes { + commit { + statusCheckRollup { + state + } + } + } + } + } + } + }` + const vars = { + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: pull['number'] + }; + const result = await github.graphql(stateQuery, vars); + const [{ commit }] = result.repository.pullRequest.commits.nodes; + const state = commit.statusCheckRollup.state + console.log('Validating status: ' + state); + if(state != 'SUCCESS') { + console.log('Discarding ' + branch + ' with status ' + state); + statusOK = false; + } + } + console.log('Checking labels: ' + branch); + const labels = pull['labels']; + for(const label of labels) { + const labelName = label['name']; + console.log('Checking label: ' + labelName); + if(labelName == '${{ github.event.inputs.ignoreLabel }}') { + console.log('Discarding ' + branch + ' with label ' + labelName); + statusOK = false; + } + } + if (statusOK) { + console.log('Adding branch to array: ' + branch); + const prString = '#' + pull['number'] + ' ' + pull['title']; + branchesAndPRStrings.push({ branch, prString }); + baseBranch = pull['base']['ref']; + baseBranchSHA = pull['base']['sha']; + } + } + } + if (branchesAndPRStrings.length == 0) { + core.setFailed('No PRs/branches matched criteria'); + return; + } + try { + await github.rest.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: 'refs/heads/' + '${{ github.event.inputs.combineBranchName }}', + sha: baseBranchSHA + }); + } catch (error) { + console.log(error); + core.setFailed('Failed to create combined branch - maybe a branch by that name already exists?'); + return; + } + + let combinedPRs = []; + let mergeFailedPRs = []; + for(const { branch, prString } of branchesAndPRStrings) { + try { + await github.rest.repos.merge({ + owner: context.repo.owner, + repo: context.repo.repo, + base: '${{ github.event.inputs.combineBranchName }}', + head: branch, + 
}); + console.log('Merged branch ' + branch); + combinedPRs.push(prString); + } catch (error) { + console.log('Failed to merge branch ' + branch); + mergeFailedPRs.push(prString); + } + } + + console.log('Creating combined PR'); + const combinedPRsString = combinedPRs.join('\n'); + let body = '✅ This PR was created by the Combine PRs action by combining the following PRs:\n' + combinedPRsString; + if(mergeFailedPRs.length > 0) { + const mergeFailedPRsString = mergeFailedPRs.join('\n'); + body += '\n\n⚠️ The following PRs were left out due to merge conflicts:\n' + mergeFailedPRsString + } + await github.rest.pulls.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: 'bot: Combined PRs', + head: '${{ github.event.inputs.combineBranchName }}', + base: baseBranch, + body: body + }); diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index df2af79dd0..a08db5565a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -46,7 +46,7 @@ jobs: # Required: the version of golangci-lint is required. # Note: The version should not pick the patch version as the latest patch # version is what will always be used. - version: v1.53 + version: v1.54 # Optional: working directory, useful for monorepos or if we wanted to run this # on a non-root directory. 
diff --git a/.github/workflows/push-docker-image-to-registries.yml b/.github/workflows/push-docker-image-to-registries.yml index d7d00d14aa..47c4e98046 100644 --- a/.github/workflows/push-docker-image-to-registries.yml +++ b/.github/workflows/push-docker-image-to-registries.yml @@ -23,13 +23,13 @@ env: jobs: push-docker-image-to-registries: name: Push Docker image to registries job - + runs-on: ubuntu-latest - + permissions: packages: write contents: read - + steps: - name: Check out the repo uses: actions/checkout@v3 @@ -57,7 +57,7 @@ jobs: with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - + - name: Log in to the Container registry uses: docker/login-action@v2 with: @@ -72,7 +72,7 @@ jobs: images: | sourcenetwork/defradb ghcr.io/${{ github.repository }} - + - name: Push Docker images uses: docker/build-push-action@v4 with: diff --git a/.github/workflows/test-collection-named.yml b/.github/workflows/test-collection-named.yml new file mode 100644 index 0000000000..5adabe4fdf --- /dev/null +++ b/.github/workflows/test-collection-named.yml @@ -0,0 +1,54 @@ +# Copyright 2023 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: Run Collection Named Mutations Tests Workflow + +# This workflow runs the test suite with any supporting mutation test actions +# running their mutations via their corresponding named [Collection] call. +# +# For example, CreateDoc will call [Collection.Create], and +# UpdateDoc will call [Collection.Update]. 
+ +on: + pull_request: + branches: + - master + - develop + + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + branches: + - master + - develop + +jobs: + test-collection-named-mutations: + name: Test Collection Named Mutations job + + runs-on: ubuntu-latest + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v3 + + - name: Setup Go environment explicitly + uses: actions/setup-go@v3 + with: + go-version: "1.20" + check-latest: true + + - name: Build dependencies + run: | + make deps:modules + make deps:test + + - name: Run tests with Collection Named mutations + run: make test:ci-col-named-mutations diff --git a/.github/workflows/test-gql-mutations.yml b/.github/workflows/test-gql-mutations.yml new file mode 100644 index 0000000000..827dd22098 --- /dev/null +++ b/.github/workflows/test-gql-mutations.yml @@ -0,0 +1,48 @@ +# Copyright 2022 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. 
+ +name: Run GQL Mutations Tests Workflow + +on: + pull_request: + branches: + - master + - develop + + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + branches: + - master + - develop + +jobs: + test-gql-mutations: + name: Test GQL mutations job + + runs-on: ubuntu-latest + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v3 + + - name: Setup Go environment explicitly + uses: actions/setup-go@v3 + with: + go-version: "1.20" + check-latest: true + + - name: Build dependencies + run: | + make deps:modules + make deps:test + + - name: Run tests with gql mutations + run: make test:ci-gql-mutations diff --git a/CHANGELOG.md b/CHANGELOG.md index 3638d3ef75..92e4e0cb4b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,75 @@ + +## [v0.7.0](https://github.com/sourcenetwork/defradb/compare/v0.6.0...v0.7.0) + +> 2023-09-18 + +DefraDB v0.7 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. + +This release has focused on robustness, testing, and schema management. Some highlight new features include notable expansions to the expressiveness of schema migrations. + +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.5.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. 
+ +### Features + +* Allow field indexing by name in PatchSchema ([#1810](https://github.com/sourcenetwork/defradb/issues/1810)) +* Auto-create relation id fields via PatchSchema ([#1807](https://github.com/sourcenetwork/defradb/issues/1807)) +* Support PatchSchema relational field kind substitution ([#1777](https://github.com/sourcenetwork/defradb/issues/1777)) +* Add support for adding of relational fields ([#1766](https://github.com/sourcenetwork/defradb/issues/1766)) +* Enable downgrading of documents via Lens inverses ([#1721](https://github.com/sourcenetwork/defradb/issues/1721)) + +### Fixes + +* Correctly handle serialisation of nil field values ([#1872](https://github.com/sourcenetwork/defradb/issues/1872)) +* Compound filter operators with relations ([#1855](https://github.com/sourcenetwork/defradb/issues/1855)) +* Only update updated fields via update requests ([#1817](https://github.com/sourcenetwork/defradb/issues/1817)) +* Error when saving a deleted document ([#1806](https://github.com/sourcenetwork/defradb/issues/1806)) +* Prevent multiple docs from being linked in one one ([#1790](https://github.com/sourcenetwork/defradb/issues/1790)) +* Handle the querying of secondary relation id fields ([#1768](https://github.com/sourcenetwork/defradb/issues/1768)) +* Improve the way migrations handle transactions ([#1737](https://github.com/sourcenetwork/defradb/issues/1737)) + +### Tooling + +* Add Akash deployment configuration ([#1736](https://github.com/sourcenetwork/defradb/issues/1736)) + +### Refactoring + +* HTTP client interface ([#1776](https://github.com/sourcenetwork/defradb/issues/1776)) +* Simplify fetcher interface ([#1746](https://github.com/sourcenetwork/defradb/issues/1746)) + +### Testing + +* Convert and move out of place explain tests ([#1878](https://github.com/sourcenetwork/defradb/issues/1878)) +* Update mutation tests to make use of mutation system ([#1853](https://github.com/sourcenetwork/defradb/issues/1853)) +* Test top level agg. 
with compound relational filter ([#1870](https://github.com/sourcenetwork/defradb/issues/1870)) +* Skip unsupported mutation types at test level ([#1850](https://github.com/sourcenetwork/defradb/issues/1850)) +* Extend mutation tests with col.Update and Create ([#1838](https://github.com/sourcenetwork/defradb/issues/1838)) +* Add tests for multiple one-one joins ([#1793](https://github.com/sourcenetwork/defradb/issues/1793)) + +### Chore + +* Update Badger version to v4 ([#1740](https://github.com/sourcenetwork/defradb/issues/1740)) +* Update go-libp2p to 0.29.2 ([#1780](https://github.com/sourcenetwork/defradb/issues/1780)) +* Bump golangci-lint to v1.54 ([#1881](https://github.com/sourcenetwork/defradb/issues/1881)) +* Bump go.opentelemetry.io/otel/metric from 1.17.0 to 1.18.0 ([#1890](https://github.com/sourcenetwork/defradb/issues/1890)) +* Bump [@tanstack](https://github.com/tanstack)/react-query from 4.35.0 to 4.35.3 in /playground ([#1876](https://github.com/sourcenetwork/defradb/issues/1876)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.5.0 to 6.7.0 in /playground ([#1874](https://github.com/sourcenetwork/defradb/issues/1874)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 6.6.0 to 6.7.0 in /playground ([#1875](https://github.com/sourcenetwork/defradb/issues/1875)) +* Combined PRs 2023-09-14 ([#1873](https://github.com/sourcenetwork/defradb/issues/1873)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.4.0 to 6.5.0 in /playground ([#1827](https://github.com/sourcenetwork/defradb/issues/1827)) +* Bump go.opentelemetry.io/otel/sdk/metric from 0.39.0 to 0.40.0 ([#1829](https://github.com/sourcenetwork/defradb/issues/1829)) +* Bump github.com/ipfs/go-block-format from 0.1.2 to 0.2.0 ([#1819](https://github.com/sourcenetwork/defradb/issues/1819)) +* Combined PRs ([#1826](https://github.com/sourcenetwork/defradb/issues/1826)) +* Bump 
[@typescript](https://github.com/typescript)-eslint/parser from 6.4.0 to 6.4.1 in /playground ([#1804](https://github.com/sourcenetwork/defradb/issues/1804)) +* Combined PRs ([#1803](https://github.com/sourcenetwork/defradb/issues/1803)) +* Combined PRs ([#1791](https://github.com/sourcenetwork/defradb/issues/1791)) +* Combined PRs ([#1778](https://github.com/sourcenetwork/defradb/issues/1778)) +* Bump dependencies ([#1761](https://github.com/sourcenetwork/defradb/issues/1761)) +* Bump vite from 4.3.9 to 4.4.8 in /playground ([#1748](https://github.com/sourcenetwork/defradb/issues/1748)) +* Bump graphiql from 3.0.4 to 3.0.5 in /playground ([#1730](https://github.com/sourcenetwork/defradb/issues/1730)) +* Combined bumps of dependencies under /playground ([#1744](https://github.com/sourcenetwork/defradb/issues/1744)) +* Bump github.com/ipfs/boxo from 0.10.2 to 0.11.0 ([#1726](https://github.com/sourcenetwork/defradb/issues/1726)) +* Bump github.com/libp2p/go-libp2p-kad-dht from 0.24.2 to 0.24.3 ([#1724](https://github.com/sourcenetwork/defradb/issues/1724)) +* Bump google.golang.org/grpc from 1.56.2 to 1.57.0 ([#1725](https://github.com/sourcenetwork/defradb/issues/1725)) ## [v0.6.0](https://github.com/sourcenetwork/defradb/compare/v0.5.1...v0.6.0) diff --git a/Makefile b/Makefile index 0e79f59646..21fcfcedf1 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ ifdef BUILD_TAGS BUILD_FLAGS+=-tags $(BUILD_TAGS) endif -TEST_FLAGS=-race -shuffle=on -timeout 150s +TEST_FLAGS=-race -shuffle=on -timeout 300s PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations @@ -76,7 +76,7 @@ client\:add-schema: .PHONY: deps\:lint deps\:lint: - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.53 + go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54 .PHONY: deps\:test deps\:test: @@ -106,7 +106,7 @@ deps\:modules: .PHONY: deps\:mock deps\:mock: - go install github.com/vektra/mockery/v2@v2.30.1 + go install 
github.com/vektra/mockery/v2@v2.32.0 .PHONY: deps\:playground deps\:playground: @@ -125,16 +125,7 @@ deps: .PHONY: mock mock: @$(MAKE) deps:mock - mockery --dir ./client --output ./client/mocks --name DB --with-expecter - mockery --dir ./client --output ./client/mocks --name Collection --with-expecter - mockery --dir ./datastore --output ./datastore/mocks --name DAGStore --with-expecter - mockery --dir ./datastore --output ./datastore/mocks --name DSReaderWriter --with-expecter - mockery --srcpkg github.com/ipfs/go-datastore/query --output ./datastore/mocks --name Results --with-expecter - mockery --dir ./datastore --output ./datastore/mocks --name RootStore --with-expecter - mockery --dir ./datastore --output ./datastore/mocks --name Txn --with-expecter - mockery --dir ./datastore --output ./datastore/mocks --name DAGStore --with-expecter - mockery --dir ./db/fetcher --output ./db/fetcher/mocks --name Fetcher --with-expecter - mockery --dir ./db/fetcher --output ./db/fetcher/mocks --name EncodedDocument --with-expecter + mockery --config="tools/configs/mockery.yaml" .PHONY: dev\:start dev\:start: @@ -196,12 +187,43 @@ test\:build: .PHONY: test\:ci test\:ci: - DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true $(MAKE) test:all + DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all + +.PHONY: test\:ci-gql-mutations +test\:ci-gql-mutations: + DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all + +.PHONY: test\:gql-mutations +test\:gql-mutations: + DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES) + +# This action and the test:col-named-mutations (below) runs the test suite with any supporting mutation test +# actions running their mutations via their corresponding named [Collection] call. 
+# +# For example, CreateDoc will call [Collection.Create], and +# UpdateDoc will call [Collection.Update]. +.PHONY: test\:ci-col-named-mutations +test\:ci-col-named-mutations: + DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all + +.PHONY: test\:col-named-mutations +test\:col-named-mutations: + DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES) .PHONY: test\:go test\:go: go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) +.PHONY: test\:http +test\:http: + DEFRA_CLIENT_HTTP=true go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) + .PHONY: test\:names test\:names: gotestsum --format testname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) @@ -273,7 +295,7 @@ test\:coverage-html: .PHONY: test\:changes test\:changes: @$(MAKE) deps:lens - env DEFRA_DETECT_DATABASE_CHANGES=true gotestsum -- ./... -shuffle=on -p 1 + env DEFRA_DETECT_DATABASE_CHANGES=true DEFRA_CLIENT_GO=true gotestsum -- ./... 
-shuffle=on -p 1 .PHONY: validate\:codecov validate\:codecov: diff --git a/api/http/handler_test.go b/api/http/handler_test.go index dbc3b346f2..2015c7a0ba 100644 --- a/api/http/handler_test.go +++ b/api/http/handler_test.go @@ -20,11 +20,11 @@ import ( "path" "testing" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" "github.com/pkg/errors" "github.com/stretchr/testify/assert" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/logging" ) diff --git a/api/http/handlerfuncs.go b/api/http/handlerfuncs.go index 9e5b212fe3..e4163de05f 100644 --- a/api/http/handlerfuncs.go +++ b/api/http/handlerfuncs.go @@ -298,12 +298,6 @@ func setMigrationHandler(rw http.ResponseWriter, req *http.Request) { return } - txn, err := db.NewTxn(req.Context(), false) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - var cfg client.LensConfig err = json.Unmarshal(cfgStr, &cfg) if err != nil { @@ -311,13 +305,7 @@ func setMigrationHandler(rw http.ResponseWriter, req *http.Request) { return } - err = db.LensRegistry().SetMigration(req.Context(), txn, cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - err = txn.Commit(req.Context()) + err = db.LensRegistry().SetMigration(req.Context(), cfg) if err != nil { handleErr(req.Context(), rw, err, http.StatusInternalServerError) return @@ -338,7 +326,7 @@ func getMigrationHandler(rw http.ResponseWriter, req *http.Request) { return } - cfgs := db.LensRegistry().Config() + cfgs, err := db.LensRegistry().Config(req.Context()) if err != nil { handleErr(req.Context(), rw, err, http.StatusInternalServerError) return diff --git a/api/http/handlerfuncs_test.go b/api/http/handlerfuncs_test.go index bb7bb71aad..bef944f908 100644 --- a/api/http/handlerfuncs_test.go +++ 
b/api/http/handlerfuncs_test.go @@ -22,7 +22,7 @@ import ( "testing" "time" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" dshelp "github.com/ipfs/boxo/datastore/dshelp" "github.com/ipfs/go-cid" "github.com/stretchr/testify/assert" @@ -30,7 +30,7 @@ import ( "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" ) diff --git a/api/http/server.go b/api/http/server.go index e662c824b6..a71dccb0ec 100644 --- a/api/http/server.go +++ b/api/http/server.go @@ -52,6 +52,8 @@ type Server struct { options serverOptions listener net.Listener certManager *autocert.Manager + // address that is assigned to the server on listen + address string http.Server } @@ -215,6 +217,10 @@ func (s *Server) Listen(ctx context.Context) error { return errors.WithStack(err) } + // Save the address on the server in case the port was set to random + // and that we want to see what was assigned. + s.address = s.listener.Addr().String() + return nil } @@ -284,6 +290,10 @@ func (s *Server) listenWithTLS(ctx context.Context) error { return errors.WithStack(err) } + // Save the address on the server in case the port was set to random + // and that we want to see what was assigned. + s.address = s.listener.Addr().String() + return nil } @@ -305,3 +315,8 @@ func (s *Server) Run(ctx context.Context) error { } return s.Serve(s.listener) } + +// AssignedAddr returns the address that was assigned to the server on calls to listen. 
+func (s *Server) AssignedAddr() string { + return s.address +} diff --git a/cli/backup_import_test.go b/cli/backup_import_test.go index ce84c5c2c6..101792dd0c 100644 --- a/cli/backup_import_test.go +++ b/cli/backup_import_test.go @@ -22,7 +22,6 @@ import ( func TestBackupImportCmd_WithNoArgument_ReturnError(t *testing.T) { cfg := getTestConfig(t) - setTestingAddresses(cfg) dbImportCmd := MakeBackupImportCommand(cfg) err := dbImportCmd.ValidateArgs([]string{}) diff --git a/cli/cli_test.go b/cli/cli_test.go index 4361191e49..877dd7b69f 100644 --- a/cli/cli_test.go +++ b/cli/cli_test.go @@ -11,16 +11,12 @@ package cli import ( - "fmt" - "math/rand" - "net" "testing" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" ) // Verify that the top-level commands are registered, and if particular ones have subcommands. @@ -61,24 +57,3 @@ func walkCommandTree(t *testing.T, cmd *cobra.Command, f func(*cobra.Command)) { walkCommandTree(t, c, f) } } - -// findFreePortInRange returns a free port in the range [minPort, maxPort]. -// The range of ports that are unfrequently used is [49152, 65535]. 
-func findFreePortInRange(minPort, maxPort int) (int, error) { - if minPort < 1 || maxPort > 65535 || minPort > maxPort { - return 0, errors.New("invalid port range") - } - - const maxAttempts = 100 - for i := 0; i < maxAttempts; i++ { - port := rand.Intn(maxPort-minPort+1) + minPort - addr := fmt.Sprintf("127.0.0.1:%d", port) - listener, err := net.Listen("tcp", addr) - if err == nil { - _ = listener.Close() - return port, nil - } - } - - return 0, errors.New("unable to find a free port") -} diff --git a/cli/index_create_test.go b/cli/index_create_test.go index 7032abbb2f..ac75248c10 100644 --- a/cli/index_create_test.go +++ b/cli/index_create_test.go @@ -37,12 +37,12 @@ func getTestConfig(t *testing.T) *config.Config { cfg.Net.P2PAddress = randomMultiaddr cfg.Net.RPCAddress = "0.0.0.0:0" cfg.Net.TCPAddress = randomMultiaddr + cfg.API.Address = "0.0.0.0:0" return cfg } func startTestNode(t *testing.T) (*config.Config, *defraInstance, func()) { cfg := getTestConfig(t) - setTestingAddresses(cfg) ctx := context.Background() di, err := start(ctx, cfg) diff --git a/cli/peerid_test.go b/cli/peerid_test.go index 16cebd7066..34874ef80d 100644 --- a/cli/peerid_test.go +++ b/cli/peerid_test.go @@ -14,7 +14,6 @@ import ( "bytes" "context" "encoding/json" - "fmt" "io" "net/http" "testing" @@ -22,43 +21,16 @@ import ( "github.com/stretchr/testify/assert" httpapi "github.com/sourcenetwork/defradb/api/http" - "github.com/sourcenetwork/defradb/config" ) -// setTestingAddresses overrides the config addresses to be the ones reserved for testing. -// Used to ensure the tests don't fail due to address clashes with the running server (with default config). 
-func setTestingAddresses(cfg *config.Config) { - portAPI, err := findFreePortInRange(49152, 65535) - if err != nil { - panic(err) - } - portTCP, err := findFreePortInRange(49152, 65535) - if err != nil { - panic(err) - } - portP2P, err := findFreePortInRange(49152, 65535) - if err != nil { - panic(err) - } - portRPC, err := findFreePortInRange(49152, 65535) - if err != nil { - panic(err) - } - cfg.API.Address = fmt.Sprintf("localhost:%d", portAPI) - cfg.Net.P2PAddress = fmt.Sprintf("/ip4/0.0.0.0/tcp/%d", portP2P) - cfg.Net.TCPAddress = fmt.Sprintf("/ip4/0.0.0.0/tcp/%d", portTCP) - cfg.Net.RPCAddress = fmt.Sprintf("0.0.0.0:%d", portRPC) -} - func TestGetPeerIDCmd(t *testing.T) { - cfg := config.DefaultConfig() + cfg := getTestConfig(t) peerIDCmd := MakePeerIDCommand(cfg) dir := t.TempDir() ctx := context.Background() cfg.Datastore.Store = "memory" cfg.Datastore.Badger.Path = dir cfg.Net.P2PDisabled = false - setTestingAddresses(cfg) di, err := start(ctx, cfg) if err != nil { @@ -89,14 +61,13 @@ func TestGetPeerIDCmd(t *testing.T) { } func TestGetPeerIDCmdWithNoP2P(t *testing.T) { - cfg := config.DefaultConfig() + cfg := getTestConfig(t) peerIDCmd := MakePeerIDCommand(cfg) dir := t.TempDir() ctx := context.Background() cfg.Datastore.Store = "memory" cfg.Datastore.Badger.Path = dir cfg.Net.P2PDisabled = true - setTestingAddresses(cfg) di, err := start(ctx, cfg) if err != nil { diff --git a/cli/serverdump.go b/cli/serverdump.go index 2888b39a8e..0ba638d268 100644 --- a/cli/serverdump.go +++ b/cli/serverdump.go @@ -19,7 +19,7 @@ import ( "github.com/sourcenetwork/defradb/config" ds "github.com/sourcenetwork/defradb/datastore" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/logging" diff --git a/cli/start.go b/cli/start.go index 5d571be46d..9185af8c92 100644 --- 
a/cli/start.go +++ b/cli/start.go @@ -20,7 +20,7 @@ import ( "strings" "syscall" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" ma "github.com/multiformats/go-multiaddr" @@ -32,7 +32,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" ds "github.com/sourcenetwork/defradb/datastore" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/logging" @@ -346,6 +346,8 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { if err := s.Listen(ctx); err != nil { return nil, errors.Wrap(fmt.Sprintf("failed to listen on TCP address %v", s.Addr), err) } + // save the address on the config in case the port number was set to random + cfg.API.Address = s.AssignedAddr() // run the server in a separate goroutine go func() { diff --git a/client/document.go b/client/document.go index 5c8fd9441d..c48ccfce88 100644 --- a/client/document.go +++ b/client/document.go @@ -209,11 +209,7 @@ func (doc *Document) SetWithJSON(patch []byte) error { } for k, v := range patchObj { - if v == nil { - err = doc.Delete(k) - } else { - err = doc.Set(k, v) - } + err = doc.Set(k, v) if err != nil { return err } @@ -273,7 +269,7 @@ func (doc *Document) setObject(t CType, field string, val *Document) error { // @todo: Update with document schemas func (doc *Document) setAndParseType(field string, value any) error { if value == nil { - return nil + return doc.setCBOR(LWW_REGISTER, field, value) } switch val := value.(type) { @@ -283,6 +279,12 @@ func (doc *Document) setAndParseType(field string, value any) error { if err != nil { return err } + case uint64: + err := 
doc.setCBOR(LWW_REGISTER, field, int64(val)) + if err != nil { + return err + } + case float64: // case int64: @@ -300,7 +302,7 @@ func (doc *Document) setAndParseType(field string, value any) error { } // string, bool, and more - case string, bool, []any: + case string, bool, int64, []any, []bool, []*bool, []int64, []*int64, []float64, []*float64, []string, []*string: err := doc.setCBOR(LWW_REGISTER, field, val) if err != nil { return err diff --git a/client/document_test.go b/client/document_test.go index c2e9b406c0..9073373cd3 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -132,7 +132,8 @@ func TestSetWithJSON(t *testing.T) { assert.Equal(t, doc.values[doc.fields["Name"]].IsDocument(), false) assert.Equal(t, doc.values[doc.fields["Age"]].Value(), int64(27)) assert.Equal(t, doc.values[doc.fields["Age"]].IsDocument(), false) - assert.Equal(t, doc.values[doc.fields["Address"]].IsDelete(), true) + assert.Equal(t, doc.values[doc.fields["Address"]].Value(), nil) + assert.Equal(t, doc.values[doc.fields["Address"]].IsDocument(), false) //subdoc fields // subDoc := doc.values[doc.fields["Address"]].Value().(*Document) diff --git a/client/errors.go b/client/errors.go index 035ac87235..ad1ad0027a 100644 --- a/client/errors.go +++ b/client/errors.go @@ -22,6 +22,7 @@ const ( errParsingFailed string = "failed to parse argument" errUninitializeProperty string = "invalid state, required property is uninitialized" errMaxTxnRetries string = "reached maximum transaction reties" + errRelationOneSided string = "relation must be defined on both schemas" ) // Errors returnable from this package. @@ -43,6 +44,7 @@ var ( ErrMalformedDocKey = errors.New("malformed DocKey, missing either version or cid") ErrInvalidDocKeyVersion = errors.New("invalid DocKey version") ErrMaxTxnRetries = errors.New(errMaxTxnRetries) + ErrRelationOneSided = errors.New(errRelationOneSided) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. 
@@ -97,3 +99,11 @@ func NewErrUninitializeProperty(host string, propertyName string) error { func NewErrMaxTxnRetries(inner error) error { return errors.Wrap(errMaxTxnRetries, inner) } + +func NewErrRelationOneSided(fieldName string, typeName string) error { + return errors.New( + errRelationOneSided, + errors.NewKV("Field", fieldName), + errors.NewKV("Type", typeName), + ) +} diff --git a/client/lens.go b/client/lens.go index 1cffa19248..7b1264275f 100644 --- a/client/lens.go +++ b/client/lens.go @@ -43,6 +43,12 @@ type LensConfig struct { // LensRegistry exposes several useful thread-safe migration related functions which may // be used to manage migrations. type LensRegistry interface { + // WithTxn returns a new LensRegistry scoped to the given transaction. + // + // WARNING: Currently this does not provide snapshot isolation, if other transactions are commited + // after this has been created, the results of those commits will be visible within this scope. + WithTxn(datastore.Txn) LensRegistry + // SetMigration sets the migration for the given source-destination schema version IDs. Is equivilent to // calling `Store.SetMigration(ctx, cfg)`. // @@ -55,29 +61,37 @@ type LensRegistry interface { // // Migrations will only run if there is a complete path from the document schema version to the latest local // schema version. - SetMigration(context.Context, datastore.Txn, LensConfig) error + SetMigration(context.Context, LensConfig) error // ReloadLenses clears any cached migrations, loads their configurations from the database and re-initializes // them. It is run on database start if the database already existed. - ReloadLenses(ctx context.Context, txn datastore.Txn) error + ReloadLenses(context.Context) error // MigrateUp returns an enumerable that feeds the given source through the Lens migration for the given // schema version id if one is found, if there is no matching migration the given source will be returned. 
- MigrateUp(enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) + MigrateUp( + context.Context, + enumerable.Enumerable[map[string]any], + string, + ) (enumerable.Enumerable[map[string]any], error) // MigrateDown returns an enumerable that feeds the given source through the Lens migration for the schema // version that precedes the given schema version id in reverse, if one is found, if there is no matching // migration the given source will be returned. // // This downgrades any documents in the source enumerable if/when enumerated. - MigrateDown(enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) + MigrateDown( + context.Context, + enumerable.Enumerable[map[string]any], + string, + ) (enumerable.Enumerable[map[string]any], error) // Config returns a slice of the configurations of the currently loaded migrations. // // Modifying the slice does not affect the loaded configurations. - Config() []LensConfig + Config(context.Context) ([]LensConfig, error) // HasMigration returns true if there is a migration registered for the given schema version id, otherwise // will return false. - HasMigration(string) bool + HasMigration(context.Context, string) (bool, error) } diff --git a/client/mocks/Collection.go b/client/mocks/collection.go similarity index 99% rename from client/mocks/Collection.go rename to client/mocks/collection.go index a675cd1a17..16d052c337 100644 --- a/client/mocks/Collection.go +++ b/client/mocks/collection.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks diff --git a/client/mocks/DB.go b/client/mocks/db.go similarity index 99% rename from client/mocks/DB.go rename to client/mocks/db.go index 82d53291da..cb0af26193 100644 --- a/client/mocks/DB.go +++ b/client/mocks/db.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
package mocks diff --git a/client/request/filter.go b/client/request/filter.go index bf32713844..67a80b58e7 100644 --- a/client/request/filter.go +++ b/client/request/filter.go @@ -10,6 +10,12 @@ package request +const ( + FilterOpOr = "_or" + FilterOpAnd = "_and" + FilterOpNot = "_not" +) + // Filter contains the parsed condition map to be // run by the Filter Evaluator. // @todo: Cache filter structure for faster condition diff --git a/config/config.go b/config/config.go index e659dc0cbc..9832a92818 100644 --- a/config/config.go +++ b/config/config.go @@ -59,7 +59,7 @@ import ( "github.com/spf13/viper" "golang.org/x/net/idna" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/logging" ) diff --git a/core/clock.go b/core/clock.go index 0f02186a9f..622a36233c 100644 --- a/core/clock.go +++ b/core/clock.go @@ -24,5 +24,5 @@ type MerkleClock interface { ctx context.Context, delta Delta, ) (ipld.Node, error) // possibly change to AddDeltaNode? - ProcessNode(context.Context, NodeGetter, cid.Cid, uint64, Delta, ipld.Node) ([]cid.Cid, error) + ProcessNode(context.Context, NodeGetter, Delta, ipld.Node) ([]cid.Cid, error) } diff --git a/core/crdt/composite.go b/core/crdt/composite.go index ab6cbe95f5..5504a19437 100644 --- a/core/crdt/composite.go +++ b/core/crdt/composite.go @@ -102,6 +102,8 @@ type CompositeDAG struct { fieldName string } +var _ core.ReplicatedData = CompositeDAG{} + func NewCompositeDAG( store datastore.DSReaderWriter, schemaVersionKey core.CollectionSchemaVersionKey, @@ -145,8 +147,10 @@ func (c CompositeDAG) Set(patch []byte, links []core.DAGLink) *CompositeDAGDelta // Merge implements ReplicatedData interface. // It ensures that the object marker exists for the given key. // If it doesn't, it adds it to the store. 
-func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta, id string) error { - if dagDelta, ok := delta.(*CompositeDAGDelta); ok && dagDelta.Status.IsDeleted() { +func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta) error { + dagDelta, isDagDelta := delta.(*CompositeDAGDelta) + + if isDagDelta && dagDelta.Status.IsDeleted() { err := c.store.Put(ctx, c.key.ToPrimaryDataStoreKey().ToDS(), []byte{base.DeletedObjectMarker}) if err != nil { return err @@ -168,7 +172,17 @@ func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta, id string) er versionKey = versionKey.WithDeletedFlag() } - err = c.store.Put(ctx, versionKey.ToDS(), []byte(c.schemaVersionKey.SchemaVersionId)) + var schemaVersionId string + if isDagDelta { + // If this is a CompositeDAGDelta take the datastore schema version from there. + // This is particularly important for P2P synced dags, as they may arrive here without having + // been migrated yet locally. + schemaVersionId = dagDelta.SchemaVersionID + } else { + schemaVersionId = c.schemaVersionKey.SchemaVersionId + } + + err = c.store.Put(ctx, versionKey.ToDS(), []byte(schemaVersionId)) if err != nil { return err } diff --git a/core/crdt/lwwreg.go b/core/crdt/lwwreg.go index 9ff5ec266c..60df739319 100644 --- a/core/crdt/lwwreg.go +++ b/core/crdt/lwwreg.go @@ -145,7 +145,7 @@ func (reg LWWRegister) ID() string { // Merge two LWWRegisty based on the order of the timestamp (ts), // if they are equal, compare IDs // MUTATE STATE -func (reg LWWRegister) Merge(ctx context.Context, delta core.Delta, id string) error { +func (reg LWWRegister) Merge(ctx context.Context, delta core.Delta) error { d, ok := delta.(*LWWRegDelta) if !ok { return ErrMismatchedMergeType @@ -174,12 +174,11 @@ func (reg LWWRegister) setValue(ctx context.Context, val []byte, priority uint64 if priority < curPrio { return nil } else if priority == curPrio { - curValue, _ := reg.store.Get(ctx, key.ToDS()) - // Do not use the first byte of the current 
value in the comparison. - // It's metadata that will falsify the result. - if len(curValue) > 0 { - curValue = curValue[1:] + curValue, err := reg.store.Get(ctx, key.ToDS()) + if err != nil { + return err } + if bytes.Compare(curValue, val) >= 0 { return nil } diff --git a/core/crdt/lwwreg_test.go b/core/crdt/lwwreg_test.go index d8bed1a93f..2b978feb2d 100644 --- a/core/crdt/lwwreg_test.go +++ b/core/crdt/lwwreg_test.go @@ -40,7 +40,7 @@ func setupLoadedLWWRegster(ctx context.Context) LWWRegister { lww := setupLWWRegister() addDelta := lww.Set([]byte("test")) addDelta.SetPriority(1) - lww.Merge(ctx, addDelta, "test") + lww.Merge(ctx, addDelta) return lww } @@ -57,7 +57,8 @@ func TestLWWRegisterInitialMerge(t *testing.T) { ctx := context.Background() lww := setupLWWRegister() addDelta := lww.Set([]byte("test")) - err := lww.Merge(ctx, addDelta, "test") + addDelta.SetPriority(1) + err := lww.Merge(ctx, addDelta) if err != nil { t.Errorf("Unexpected error: %s\n", err) return @@ -80,7 +81,7 @@ func TestLWWReisterFollowupMerge(t *testing.T) { lww := setupLoadedLWWRegster(ctx) addDelta := lww.Set([]byte("test2")) addDelta.SetPriority(2) - lww.Merge(ctx, addDelta, "test") + lww.Merge(ctx, addDelta) val, err := lww.Value(ctx) if err != nil { @@ -97,7 +98,7 @@ func TestLWWRegisterOldMerge(t *testing.T) { lww := setupLoadedLWWRegster(ctx) addDelta := lww.Set([]byte("test-1")) addDelta.SetPriority(0) - lww.Merge(ctx, addDelta, "test") + lww.Merge(ctx, addDelta) val, err := lww.Value(ctx) if err != nil { diff --git a/core/replicated.go b/core/replicated.go index fde3899840..86d0523e42 100644 --- a/core/replicated.go +++ b/core/replicated.go @@ -21,7 +21,7 @@ import ( // replicated data so as to converge on the same state. 
type ReplicatedData interface { ID() string - Merge(ctx context.Context, other Delta, id string) error + Merge(ctx context.Context, other Delta) error DeltaDecode(node ipld.Node) (Delta, error) // possibly rename to just Decode Value(ctx context.Context) ([]byte, error) } diff --git a/datastore/badger/v3/compat_logger.go b/datastore/badger/v4/compat_logger.go similarity index 100% rename from datastore/badger/v3/compat_logger.go rename to datastore/badger/v4/compat_logger.go diff --git a/datastore/badger/v3/datastore.go b/datastore/badger/v4/datastore.go similarity index 99% rename from datastore/badger/v3/datastore.go rename to datastore/badger/v4/datastore.go index ea58fb6c7a..23ed75df53 100644 --- a/datastore/badger/v3/datastore.go +++ b/datastore/badger/v4/datastore.go @@ -11,7 +11,7 @@ import ( "sync" "time" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" logger "github.com/ipfs/go-log/v2" diff --git a/datastore/badger/v3/datastore_test.go b/datastore/badger/v4/datastore_test.go similarity index 99% rename from datastore/badger/v3/datastore_test.go rename to datastore/badger/v4/datastore_test.go index 858f87f633..e978fde92e 100644 --- a/datastore/badger/v3/datastore_test.go +++ b/datastore/badger/v4/datastore_test.go @@ -15,7 +15,7 @@ import ( "testing" "time" - "github.com/dgraph-io/badger/v3" + "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" "github.com/stretchr/testify/assert" diff --git a/datastore/badger/v3/errors.go b/datastore/badger/v4/errors.go similarity index 100% rename from datastore/badger/v3/errors.go rename to datastore/badger/v4/errors.go diff --git a/datastore/badger/v3/iterator.go b/datastore/badger/v4/iterator.go similarity index 99% rename from datastore/badger/v3/iterator.go rename to datastore/badger/v4/iterator.go index 3fa0b60b9c..15d8dfbf6d 100644 --- 
a/datastore/badger/v3/iterator.go +++ b/datastore/badger/v4/iterator.go @@ -17,7 +17,7 @@ import ( "context" "sync" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" goprocess "github.com/jbenet/goprocess" diff --git a/datastore/concurrent_txn.go b/datastore/concurrent_txn.go index 5b2b6defd2..2a8aed015e 100644 --- a/datastore/concurrent_txn.go +++ b/datastore/concurrent_txn.go @@ -31,7 +31,7 @@ type concurrentTxn struct { } // NewConcurrentTxnFrom creates a new Txn from rootstore that supports concurrent API calls -func NewConcurrentTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, readonly bool) (Txn, error) { +func NewConcurrentTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, id uint64, readonly bool) (Txn, error) { var rootTxn ds.Txn var err error @@ -54,6 +54,8 @@ func NewConcurrentTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, readon return &txn{ rootConcurentTxn, multistore, + id, + []func(){}, []func(){}, []func(){}, }, nil diff --git a/datastore/concurrent_txt_test.go b/datastore/concurrent_txt_test.go index e25dba3f5d..f3e03b8c3e 100644 --- a/datastore/concurrent_txt_test.go +++ b/datastore/concurrent_txt_test.go @@ -14,11 +14,11 @@ import ( "context" "testing" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" "github.com/stretchr/testify/require" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/datastore/memory" ) @@ -28,7 +28,7 @@ func TestNewConcurrentTxnFrom(t *testing.T) { rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - txn, err := NewConcurrentTxnFrom(ctx, rootstore, false) + txn, err := NewConcurrentTxnFrom(ctx, rootstore, 0, false) require.NoError(t, err) err = txn.Commit(ctx) @@ -44,7 +44,7 @@ func 
TestNewConcurrentTxnFromWithStoreClosed(t *testing.T) { err = rootstore.Close() require.NoError(t, err) - _, err = NewConcurrentTxnFrom(ctx, rootstore, false) + _, err = NewConcurrentTxnFrom(ctx, rootstore, 0, false) require.ErrorIs(t, err, badgerds.ErrClosed) } @@ -52,7 +52,7 @@ func TestNewConcurrentTxnFromNonIterable(t *testing.T) { ctx := context.Background() rootstore := memory.NewDatastore(ctx) - txn, err := NewConcurrentTxnFrom(ctx, rootstore, false) + txn, err := NewConcurrentTxnFrom(ctx, rootstore, 0, false) require.NoError(t, err) err = txn.Commit(ctx) @@ -66,7 +66,7 @@ func TestNewConcurrentTxnFromNonIterableWithStoreClosed(t *testing.T) { err := rootstore.Close() require.NoError(t, err) - _, err = NewConcurrentTxnFrom(ctx, rootstore, false) + _, err = NewConcurrentTxnFrom(ctx, rootstore, 0, false) require.ErrorIs(t, err, badgerds.ErrClosed) } diff --git a/datastore/memory/txn.go b/datastore/memory/txn.go index 3cd7ab2bf9..7430077e46 100644 --- a/datastore/memory/txn.go +++ b/datastore/memory/txn.go @@ -123,9 +123,9 @@ func (t *basicTxn) GetSize(ctx context.Context, key ds.Key) (size int, err error // Has implements ds.Has. 
func (t *basicTxn) Has(ctx context.Context, key ds.Key) (exists bool, err error) { - t.ds.closeLk.RLock() - defer t.ds.closeLk.RUnlock() - if t.ds.closed { + t.closeLk.RLock() + defer t.closeLk.RUnlock() + if t.closed { return false, ErrClosed } @@ -162,7 +162,7 @@ func (t *basicTxn) Put(ctx context.Context, key ds.Key, value []byte) error { func (t *basicTxn) Query(ctx context.Context, q dsq.Query) (dsq.Results, error) { t.closeLk.RLock() defer t.closeLk.RUnlock() - if t.ds.closed { + if t.closed { return nil, ErrClosed } diff --git a/datastore/mocks/DAGStore.go b/datastore/mocks/dag_store.go similarity index 99% rename from datastore/mocks/DAGStore.go rename to datastore/mocks/dag_store.go index 1ca7d96d7b..8408013ccc 100644 --- a/datastore/mocks/DAGStore.go +++ b/datastore/mocks/dag_store.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks diff --git a/datastore/mocks/DSReaderWriter.go b/datastore/mocks/ds_reader_writer.go similarity index 99% rename from datastore/mocks/DSReaderWriter.go rename to datastore/mocks/ds_reader_writer.go index 3d822f6d2c..865086c697 100644 --- a/datastore/mocks/DSReaderWriter.go +++ b/datastore/mocks/ds_reader_writer.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks diff --git a/datastore/mocks/Results.go b/datastore/mocks/results.go similarity index 99% rename from datastore/mocks/Results.go rename to datastore/mocks/results.go index 69e19a420e..e1fee8f859 100644 --- a/datastore/mocks/Results.go +++ b/datastore/mocks/results.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
package mocks diff --git a/datastore/mocks/RootStore.go b/datastore/mocks/root_store.go similarity index 99% rename from datastore/mocks/RootStore.go rename to datastore/mocks/root_store.go index 96f9cb6256..836a059f68 100644 --- a/datastore/mocks/RootStore.go +++ b/datastore/mocks/root_store.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks diff --git a/datastore/mocks/Txn.go b/datastore/mocks/txn.go similarity index 84% rename from datastore/mocks/Txn.go rename to datastore/mocks/txn.go index 2fe024a9ad..dd3fb60def 100644 --- a/datastore/mocks/Txn.go +++ b/datastore/mocks/txn.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks @@ -226,6 +226,80 @@ func (_c *Txn_Headstore_Call) RunAndReturn(run func() datastore.DSReaderWriter) return _c } +// ID provides a mock function with given fields: +func (_m *Txn) ID() uint64 { + ret := _m.Called() + + var r0 uint64 + if rf, ok := ret.Get(0).(func() uint64); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(uint64) + } + + return r0 +} + +// Txn_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID' +type Txn_ID_Call struct { + *mock.Call +} + +// ID is a helper method to define mock.On call +func (_e *Txn_Expecter) ID() *Txn_ID_Call { + return &Txn_ID_Call{Call: _e.mock.On("ID")} +} + +func (_c *Txn_ID_Call) Run(run func()) *Txn_ID_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Txn_ID_Call) Return(_a0 uint64) *Txn_ID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Txn_ID_Call) RunAndReturn(run func() uint64) *Txn_ID_Call { + _c.Call.Return(run) + return _c +} + +// OnDiscard provides a mock function with given fields: fn +func (_m *Txn) OnDiscard(fn func()) { + _m.Called(fn) +} + +// Txn_OnDiscard_Call is a *mock.Call that shadows Run/Return methods with type explicit version 
for method 'OnDiscard' +type Txn_OnDiscard_Call struct { + *mock.Call +} + +// OnDiscard is a helper method to define mock.On call +// - fn func() +func (_e *Txn_Expecter) OnDiscard(fn interface{}) *Txn_OnDiscard_Call { + return &Txn_OnDiscard_Call{Call: _e.mock.On("OnDiscard", fn)} +} + +func (_c *Txn_OnDiscard_Call) Run(run func(fn func())) *Txn_OnDiscard_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(func())) + }) + return _c +} + +func (_c *Txn_OnDiscard_Call) Return() *Txn_OnDiscard_Call { + _c.Call.Return() + return _c +} + +func (_c *Txn_OnDiscard_Call) RunAndReturn(run func(func())) *Txn_OnDiscard_Call { + _c.Call.Return(run) + return _c +} + // OnError provides a mock function with given fields: fn func (_m *Txn) OnError(fn func()) { _m.Called(fn) diff --git a/datastore/txn.go b/datastore/txn.go index 45b968ea98..d0fa3d2f35 100644 --- a/datastore/txn.go +++ b/datastore/txn.go @@ -22,6 +22,9 @@ import ( type Txn interface { MultiStore + // ID returns the unique immutable identifier for this transaction. + ID() uint64 + // Commit finalizes a transaction, attempting to commit it to the Datastore. // May return an error if the transaction has gone stale. The presence of an // error is an indication that the data was not committed to the Datastore. @@ -32,22 +35,31 @@ type Txn interface { // state of the Datastore, making it safe to defer. Discard(ctx context.Context) + // OnSuccess registers a function to be called when the transaction is committed. OnSuccess(fn func()) + + // OnError registers a function to be called when the transaction is rolled back. OnError(fn func()) + + // OnDiscard registers a function to be called when the transaction is discarded. + OnDiscard(fn func()) } type txn struct { t ds.Txn MultiStore + id uint64 + successFns []func() errorFns []func() + discardFns []func() } var _ Txn = (*txn)(nil) // NewTxnFrom returns a new Txn from the rootstore. 
-func NewTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, readonly bool) (Txn, error) { +func NewTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, id uint64, readonly bool) (Txn, error) { // check if our datastore natively supports iterable transaction, transactions or batching if iterableTxnStore, ok := rootstore.(iterable.IterableTxnDatastore); ok { rootTxn, err := iterableTxnStore.NewIterableTransaction(ctx, readonly) @@ -58,6 +70,8 @@ func NewTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, readonly bool) ( return &txn{ rootTxn, multistore, + id, + []func(){}, []func(){}, []func(){}, }, nil @@ -73,24 +87,32 @@ func NewTxnFrom(ctx context.Context, rootstore ds.TxnDatastore, readonly bool) ( return &txn{ rootTxn, multistore, + id, + []func(){}, []func(){}, []func(){}, }, nil } +// ID returns the unique immutable identifier for this transaction. +func (t *txn) ID() uint64 { + return t.id +} + // Commit finalizes a transaction, attempting to commit it to the Datastore. func (t *txn) Commit(ctx context.Context) error { if err := t.t.Commit(ctx); err != nil { - t.runErrorFns(ctx) + runFns(t.errorFns) return err } - t.runSuccessFns(ctx) + runFns(t.successFns) return nil } // Discard throws away changes recorded in a transaction without committing. func (t *txn) Discard(ctx context.Context) { t.t.Discard(ctx) + runFns(t.discardFns) } // OnSuccess registers a function to be called when the transaction is committed. @@ -109,14 +131,16 @@ func (txn *txn) OnError(fn func()) { txn.errorFns = append(txn.errorFns, fn) } -func (txn *txn) runErrorFns(ctx context.Context) { - for _, fn := range txn.errorFns { - fn() +// OnDiscard registers a function to be called when the transaction is discarded. 
+func (txn *txn) OnDiscard(fn func()) { + if fn == nil { + return } + txn.discardFns = append(txn.discardFns, fn) } -func (txn *txn) runSuccessFns(ctx context.Context) { - for _, fn := range txn.successFns { +func runFns(fns []func()) { + for _, fn := range fns { fn() } } diff --git a/datastore/txn_test.go b/datastore/txn_test.go index dabb05beab..e46dbdae8f 100644 --- a/datastore/txn_test.go +++ b/datastore/txn_test.go @@ -14,11 +14,11 @@ import ( "context" "testing" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" "github.com/stretchr/testify/require" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" ) func TestNewTxnFrom(t *testing.T) { @@ -27,7 +27,7 @@ func TestNewTxnFrom(t *testing.T) { rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - txn, err := NewTxnFrom(ctx, rootstore, false) + txn, err := NewTxnFrom(ctx, rootstore, 0, false) require.NoError(t, err) err = txn.Commit(ctx) @@ -43,7 +43,7 @@ func TestNewTxnFromWithStoreClosed(t *testing.T) { err = rootstore.Close() require.NoError(t, err) - _, err = NewTxnFrom(ctx, rootstore, false) + _, err = NewTxnFrom(ctx, rootstore, 0, false) require.ErrorIs(t, err, badgerds.ErrClosed) } @@ -53,7 +53,7 @@ func TestOnSuccess(t *testing.T) { rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - txn, err := NewTxnFrom(ctx, rootstore, false) + txn, err := NewTxnFrom(ctx, rootstore, 0, false) require.NoError(t, err) txn.OnSuccess(nil) @@ -74,7 +74,7 @@ func TestOnError(t *testing.T) { rootstore, err := badgerds.NewDatastore("", &opts) require.NoError(t, err) - txn, err := NewTxnFrom(ctx, rootstore, false) + txn, err := NewTxnFrom(ctx, rootstore, 0, false) require.NoError(t, err) txn.OnError(nil) diff --git a/datastore/wrappedstore_test.go b/datastore/wrappedstore_test.go index eb576bb2f5..200af3e3f7 100644 --- 
a/datastore/wrappedstore_test.go +++ b/datastore/wrappedstore_test.go @@ -14,12 +14,12 @@ import ( "context" "testing" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" "github.com/stretchr/testify/require" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/datastore/memory" ) diff --git a/db/collection.go b/db/collection.go index 3430684697..a9d3f5c403 100644 --- a/db/collection.go +++ b/db/collection.go @@ -16,6 +16,7 @@ import ( "encoding/json" "fmt" "strconv" + "strings" "github.com/fxamacker/cbor/v2" "github.com/ipfs/go-cid" @@ -230,9 +231,11 @@ func (db *db) createCollection( func (db *db) updateCollection( ctx context.Context, txn datastore.Txn, + existingDescriptionsByName map[string]client.CollectionDescription, + proposedDescriptionsByName map[string]client.CollectionDescription, desc client.CollectionDescription, ) (client.Collection, error) { - hasChanged, err := db.validateUpdateCollection(ctx, txn, desc) + hasChanged, err := db.validateUpdateCollection(ctx, txn, existingDescriptionsByName, proposedDescriptionsByName, desc) if err != nil { return nil, err } @@ -241,6 +244,20 @@ func (db *db) updateCollection( return db.getCollectionByName(ctx, txn, desc.Name) } + for _, field := range desc.Schema.Fields { + if field.RelationType.IsSet(client.Relation_Type_ONE) { + idFieldName := field.Name + "_id" + if _, ok := desc.Schema.GetField(idFieldName); !ok { + desc.Schema.Fields = append(desc.Schema.Fields, client.FieldDescription{ + Name: idFieldName, + Kind: client.FieldKind_DocKey, + RelationType: client.Relation_Type_INTERNAL_ID, + RelationName: field.RelationName, + }) + } + } + } + for i, field := range desc.Schema.Fields { if field.ID == client.FieldID(0) { // This is not wonderful and will probably break when we add the ability @@ 
-311,17 +328,18 @@ func (db *db) updateCollection( func (db *db) validateUpdateCollection( ctx context.Context, txn datastore.Txn, + existingDescriptionsByName map[string]client.CollectionDescription, + proposedDescriptionsByName map[string]client.CollectionDescription, proposedDesc client.CollectionDescription, ) (bool, error) { - existingCollection, err := db.getCollectionByName(ctx, txn, proposedDesc.Name) - if err != nil { - if errors.Is(err, ds.ErrNotFound) { - // Original error is quite unhelpful to users at the moment so we return a custom one - return false, NewErrAddCollectionWithPatch(proposedDesc.Name) - } - return false, err + if proposedDesc.Name == "" { + return false, ErrCollectionNameEmpty + } + + existingDesc, collectionExists := existingDescriptionsByName[proposedDesc.Name] + if !collectionExists { + return false, NewErrAddCollectionWithPatch(proposedDesc.Name) } - existingDesc := existingCollection.Description() if proposedDesc.ID != existingDesc.ID { return false, NewErrCollectionIDDoesntMatch(proposedDesc.Name, existingDesc.ID, proposedDesc.ID) @@ -346,7 +364,7 @@ func (db *db) validateUpdateCollection( return false, ErrCannotSetVersionID } - hasChangedFields, err := validateUpdateCollectionFields(existingDesc, proposedDesc) + hasChangedFields, err := validateUpdateCollectionFields(proposedDescriptionsByName, existingDesc, proposedDesc) if err != nil { return hasChangedFields, err } @@ -356,6 +374,7 @@ func (db *db) validateUpdateCollection( } func validateUpdateCollectionFields( + descriptionsByName map[string]client.CollectionDescription, existingDesc client.CollectionDescription, proposedDesc client.CollectionDescription, ) (bool, error) { @@ -386,7 +405,130 @@ func validateUpdateCollectionFields( if !fieldAlreadyExists && (proposedField.Kind == client.FieldKind_FOREIGN_OBJECT || proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY) { - return false, NewErrCannotAddRelationalField(proposedField.Name, proposedField.Kind) + if 
proposedField.Schema == "" { + return false, NewErrRelationalFieldMissingSchema(proposedField.Name, proposedField.Kind) + } + + relatedDesc, relatedDescFound := descriptionsByName[proposedField.Schema] + + if !relatedDescFound { + return false, NewErrSchemaNotFound(proposedField.Name, proposedField.Schema) + } + + if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT { + if !proposedField.RelationType.IsSet(client.Relation_Type_ONE) || + !(proposedField.RelationType.IsSet(client.Relation_Type_ONEONE) || + proposedField.RelationType.IsSet(client.Relation_Type_ONEMANY)) { + return false, NewErrRelationalFieldInvalidRelationType( + proposedField.Name, + fmt.Sprintf( + "%v and %v or %v, with optionally %v", + client.Relation_Type_ONE, + client.Relation_Type_ONEONE, + client.Relation_Type_ONEMANY, + client.Relation_Type_Primary, + ), + proposedField.RelationType, + ) + } + } + + if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + if !proposedField.RelationType.IsSet(client.Relation_Type_MANY) || + !proposedField.RelationType.IsSet(client.Relation_Type_ONEMANY) { + return false, NewErrRelationalFieldInvalidRelationType( + proposedField.Name, + client.Relation_Type_MANY|client.Relation_Type_ONEMANY, + proposedField.RelationType, + ) + } + } + + if proposedField.RelationName == "" { + return false, NewErrRelationalFieldMissingRelationName(proposedField.Name) + } + + if proposedField.RelationType.IsSet(client.Relation_Type_Primary) { + if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + return false, NewErrPrimarySideOnMany(proposedField.Name) + } + } + + if proposedField.Kind == client.FieldKind_FOREIGN_OBJECT { + idFieldName := proposedField.Name + request.RelatedObjectID + idField, idFieldFound := proposedDesc.Schema.GetField(idFieldName) + if idFieldFound { + if idField.Kind != client.FieldKind_DocKey { + return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocKey, idField.Kind) + } + + if idField.RelationType 
!= client.Relation_Type_INTERNAL_ID { + return false, NewErrRelationalFieldInvalidRelationType( + idField.Name, + client.Relation_Type_INTERNAL_ID, + idField.RelationType, + ) + } + + if idField.RelationName == "" { + return false, NewErrRelationalFieldMissingRelationName(idField.Name) + } + } + } + + var relatedFieldFound bool + var relatedField client.FieldDescription + for _, field := range relatedDesc.Schema.Fields { + if field.RelationName == proposedField.RelationName && + !field.RelationType.IsSet(client.Relation_Type_INTERNAL_ID) && + !(relatedDesc.Name == proposedDesc.Name && field.Name == proposedField.Name) { + relatedFieldFound = true + relatedField = field + break + } + } + + if !relatedFieldFound { + return false, client.NewErrRelationOneSided(proposedField.Name, proposedField.Schema) + } + + if !(proposedField.RelationType.IsSet(client.Relation_Type_Primary) || + relatedField.RelationType.IsSet(client.Relation_Type_Primary)) { + return false, NewErrPrimarySideNotDefined(proposedField.RelationName) + } + + if proposedField.RelationType.IsSet(client.Relation_Type_Primary) && + relatedField.RelationType.IsSet(client.Relation_Type_Primary) { + return false, NewErrBothSidesPrimary(proposedField.RelationName) + } + + if proposedField.RelationType.IsSet(client.Relation_Type_ONEONE) && + relatedField.Kind != client.FieldKind_FOREIGN_OBJECT { + return false, NewErrRelatedFieldKindMismatch( + proposedField.RelationName, + client.FieldKind_FOREIGN_OBJECT, + relatedField.Kind, + ) + } + + if proposedField.RelationType.IsSet(client.Relation_Type_ONEMANY) && + proposedField.Kind == client.FieldKind_FOREIGN_OBJECT && + relatedField.Kind != client.FieldKind_FOREIGN_OBJECT_ARRAY { + return false, NewErrRelatedFieldKindMismatch( + proposedField.RelationName, + client.FieldKind_FOREIGN_OBJECT_ARRAY, + relatedField.Kind, + ) + } + + if proposedField.RelationType.IsSet(client.Relation_Type_ONEONE) && + !relatedField.RelationType.IsSet(client.Relation_Type_ONEONE) { + 
return false, NewErrRelatedFieldRelationTypeMismatch( + proposedField.RelationName, + client.Relation_Type_ONEONE, + relatedField.RelationType, + ) + } } if _, isDuplicate := newFieldNames[proposedField.Name]; isDuplicate { @@ -816,16 +958,19 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { return err } - if !isDeleted { - if exists { - err = c.update(ctx, txn, doc) - } else { - err = c.create(ctx, txn, doc) - } - if err != nil { - return err - } + if isDeleted { + return NewErrDocumentDeleted(doc.Key().String()) } + + if exists { + err = c.update(ctx, txn, doc) + } else { + err = c.create(ctx, txn, doc) + } + if err != nil { + return err + } + return c.commitImplicitTxn(ctx, txn) } @@ -890,6 +1035,11 @@ func (c *collection) save( continue } + err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.Key().String(), fieldDescription, val.Value()) + if err != nil { + return cid.Undef, err + } + node, _, err := c.saveDocValue(ctx, txn, fieldKey, val) if err != nil { return cid.Undef, err @@ -953,6 +1103,84 @@ func (c *collection) save( return headNode.Cid(), nil } +func (c *collection) validateOneToOneLinkDoesntAlreadyExist( + ctx context.Context, + txn datastore.Txn, + docKey string, + fieldDescription client.FieldDescription, + value any, +) error { + if !fieldDescription.RelationType.IsSet(client.Relation_Type_INTERNAL_ID) { + return nil + } + + if value == nil { + return nil + } + + objFieldDescription, ok := c.desc.Schema.GetField(strings.TrimSuffix(fieldDescription.Name, request.RelatedObjectID)) + if !ok { + return client.NewErrFieldNotExist(strings.TrimSuffix(fieldDescription.Name, request.RelatedObjectID)) + } + if !objFieldDescription.RelationType.IsSet(client.Relation_Type_ONEONE) { + return nil + } + + filter := fmt.Sprintf( + `{_and: [{%s: {_ne: "%s"}}, {%s: {_eq: "%s"}}]}`, + request.KeyFieldName, + docKey, + fieldDescription.Name, + value, + ) + selectionPlan, err := c.makeSelectionPlan(ctx, txn, filter) + if err != 
nil { + return err + } + + err = selectionPlan.Init() + if err != nil { + closeErr := selectionPlan.Close() + if closeErr != nil { + return errors.Wrap(err.Error(), closeErr) + } + return err + } + + if err = selectionPlan.Start(); err != nil { + closeErr := selectionPlan.Close() + if closeErr != nil { + return errors.Wrap(err.Error(), closeErr) + } + return err + } + + alreadyLinked, err := selectionPlan.Next() + if err != nil { + closeErr := selectionPlan.Close() + if closeErr != nil { + return errors.Wrap(err.Error(), closeErr) + } + return err + } + + if alreadyLinked { + existingDocument := selectionPlan.Value() + err := selectionPlan.Close() + if err != nil { + return err + } + return NewErrOneOneAlreadyLinked(docKey, existingDocument.GetKey(), objFieldDescription.RelationName) + } + + err = selectionPlan.Close() + if err != nil { + return err + } + + return nil +} + // Delete will attempt to delete a document by key will return true if a deletion is successful, // and return false, along with an error, if it cannot. // If the document doesn't exist, then it will return false, and a ErrDocumentNotFound error. 
@@ -1153,13 +1381,6 @@ func (c *collection) commitImplicitTxn(ctx context.Context, txn datastore.Txn) e return nil } -func (c *collection) getPrimaryKey(docKey string) core.PrimaryDataStoreKey { - return core.PrimaryDataStoreKey{ - CollectionId: fmt.Sprint(c.colID), - DocKey: docKey, - } -} - func (c *collection) getPrimaryKeyFromDocKey(docKey client.DocKey) core.PrimaryDataStoreKey { return core.PrimaryDataStoreKey{ CollectionId: fmt.Sprint(c.colID), diff --git a/db/collection_get.go b/db/collection_get.go index 17e113231e..8262ff44ba 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/db/fetcher" ) func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { @@ -70,7 +71,7 @@ func (c *collection) get( } // return first matched decoded doc - doc, _, err := df.FetchNextDecoded(ctx) + encodedDoc, _, err := df.FetchNext(ctx) if err != nil { _ = df.Close() return nil, err @@ -81,5 +82,14 @@ func (c *collection) get( return nil, err } + if encodedDoc == nil { + return nil, nil + } + + doc, err := fetcher.Decode(encodedDoc) + if err != nil { + return nil, err + } + return doc, nil } diff --git a/db/collection_index.go b/db/collection_index.go index a3a45ee7d4..791817a0a3 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -24,6 +24,7 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/db/fetcher" "github.com/sourcenetwork/defradb/request/graphql/schema" ) @@ -265,16 +266,21 @@ func (c *collection) iterateAllDocs( return err } - var doc *client.Document for { - doc, _, err = df.FetchNextDecoded(ctx) + encodedDoc, _, err := df.FetchNext(ctx) if err != nil { _ = df.Close() return err } - if 
doc == nil { + if encodedDoc == nil { break } + + doc, err := fetcher.Decode(encodedDoc) + if err != nil { + return err + } + err = exec(doc) if err != nil { return err diff --git a/db/collection_update.go b/db/collection_update.go index b945ec4592..1a15482935 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -12,18 +12,16 @@ package db import ( "context" - "fmt" "strings" - cbor "github.com/fxamacker/cbor/v2" + ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/immutable" "github.com/valyala/fastjson" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" + "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/planner" ) @@ -133,16 +131,17 @@ func (c *collection) updateWithKey( if err != nil { return nil, err } - v, err := doc.ToMap() - if err != nil { - return nil, err - } if isPatch { // todo } else { - err = c.applyMerge(ctx, txn, v, parsedUpdater.GetObject()) + err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) + } + if err != nil { + return nil, err } + + _, err = c.save(ctx, txn, doc, false) if err != nil { return nil, err } @@ -180,20 +179,21 @@ func (c *collection) updateWithKeys( if err != nil { return nil, err } - v, err := doc.ToMap() - if err != nil { - return nil, err - } if isPatch { // todo } else { - err = c.applyMerge(ctx, txn, v, parsedUpdater.GetObject()) + err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) } if err != nil { return nil, err } + _, err = c.save(ctx, txn, doc, false) + if err != nil { + return nil, err + } + results.DocKeys[i] = key.String() results.Count++ } @@ -262,141 +262,68 @@ func (c *collection) updateWithFilter( } // Get the document, and apply the patch - doc := docMap.ToMap(selectionPlan.Value()) + docAsMap := docMap.ToMap(selectionPlan.Value()) + doc, err := client.NewDocFromMap(docAsMap) + if 
err != nil { + return nil, err + } + if isPatch { // todo } else if isMerge { // else is fine here - err = c.applyMerge(ctx, txn, doc, parsedUpdater.GetObject()) + err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) } if err != nil { return nil, err } + _, err = c.save(ctx, txn, doc, false) + if err != nil { + return nil, err + } + // add successful updated doc to results - results.DocKeys = append(results.DocKeys, doc[request.KeyFieldName].(string)) + results.DocKeys = append(results.DocKeys, doc.Key().String()) results.Count++ } return results, nil } -func (c *collection) applyMerge( - ctx context.Context, - txn datastore.Txn, - doc map[string]any, +// applyMergeToDoc applies the given json merge to the given Defra doc. +// +// It does not save the document. +func (c *collection) applyMergeToDoc( + doc *client.Document, merge *fastjson.Object, ) error { - keyStr, ok := doc["_key"].(string) - if !ok { - return ErrDocMissingKey - } - key := c.getPrimaryKey(keyStr) - links := make([]core.DAGLink, 0) - mergeMap := make(map[string]*fastjson.Value) merge.Visit(func(k []byte, v *fastjson.Value) { mergeMap[string(k)] = v }) - mergeCBOR := make(map[string]any) - for mfield, mval := range mergeMap { - if mval.Type() == fastjson.TypeObject { - return ErrInvalidMergeValueType + fd, isValidField := c.desc.Schema.GetField(mfield) + if !isValidField { + return client.NewErrFieldNotExist(mfield) } - fd, isValidAliasField := c.desc.Schema.GetField(mfield + request.RelatedObjectID) - if isValidAliasField { - // Overwrite the key with aliased name to the internal related object name. 
- oldKey := mfield - mfield = mfield + request.RelatedObjectID - mergeMap[mfield] = mval - delete(mergeMap, oldKey) - } else { - var isValidField bool - fd, isValidField = c.desc.Schema.GetField(mfield) + if fd.Kind == client.FieldKind_FOREIGN_OBJECT { + fd, isValidField = c.desc.Schema.GetField(mfield + request.RelatedObjectID) if !isValidField { return client.NewErrFieldNotExist(mfield) } } - relationFieldDescription, isSecondaryRelationID := c.isSecondaryIDField(fd) - if isSecondaryRelationID { - primaryId, err := getString(mval) - if err != nil { - return err - } - - err = c.patchPrimaryDoc(ctx, txn, relationFieldDescription, keyStr, primaryId) - if err != nil { - return err - } - - // If this field was a secondary relation ID the related document will have been - // updated instead and we should discard this merge item - continue - } - cborVal, err := validateFieldSchema(mval, fd) if err != nil { return err } - mergeCBOR[mfield] = cborVal - - val := client.NewCBORValue(fd.Typ, cborVal) - fieldKey, fieldExists := c.tryGetFieldKey(key, mfield) - if !fieldExists { - return client.NewErrFieldNotExist(mfield) - } - c, _, err := c.saveDocValue(ctx, txn, fieldKey, val) + err = doc.Set(fd.Name, cborVal) if err != nil { return err } - - links = append(links, core.DAGLink{ - Name: mfield, - Cid: c.Cid(), - }) - } - - // Update CompositeDAG - em, err := cbor.CanonicalEncOptions().EncMode() - if err != nil { - return err - } - buf, err := em.Marshal(mergeCBOR) - if err != nil { - return err - } - - headNode, priority, err := c.saveValueToMerkleCRDT( - ctx, - txn, - key.ToDataStoreKey(), - client.COMPOSITE, - buf, - links, - client.Active, - ) - if err != nil { - return err - } - - if c.db.events.Updates.HasValue() { - txn.OnSuccess( - func() { - c.db.events.Updates.Value().Publish( - events.Update{ - DocKey: keyStr, - Cid: headNode.Cid(), - SchemaID: c.schemaID, - Block: headNode, - Priority: priority, - }, - ) - }, - ) } return nil @@ -438,13 +365,45 @@ func (c 
*collection) patchPrimaryDoc( } primaryCol = primaryCol.WithTxn(txn) - primaryField, _ := primaryCol.Description().GetRelation(relationFieldDescription.RelationName) + primaryField, ok := primaryCol.Description().GetRelation(relationFieldDescription.RelationName) + if !ok { + return client.NewErrFieldNotExist(relationFieldDescription.RelationName) + } + + primaryIDField, ok := primaryCol.Description().Schema.GetField(primaryField.Name + request.RelatedObjectID) + if !ok { + return client.NewErrFieldNotExist(primaryField.Name + request.RelatedObjectID) + } - _, err = primaryCol.UpdateWithKey( + doc, err := primaryCol.Get( ctx, primaryDockey, - fmt.Sprintf(`{"%s": "%s"}`, primaryField.Name+request.RelatedObjectID, docKey), + false, ) + if err != nil && !errors.Is(err, ds.ErrNotFound) { + return err + } + + // If the document doesn't exist then there is nothing to update. + if doc == nil { + return nil + } + + existingVal, err := doc.GetValue(primaryIDField.Name) + if err != nil && !errors.Is(err, client.ErrFieldNotExist) { + return err + } + + if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docKey { + return NewErrOneOneAlreadyLinked(docKey, fieldValue, relationFieldDescription.RelationName) + } + + err = doc.Set(primaryIDField.Name, docKey) + if err != nil { + return err + } + + err = primaryCol.Update(ctx, doc) if err != nil { return err } diff --git a/db/db.go b/db/db.go index 8ffda296b4..0bc9a361c3 100644 --- a/db/db.go +++ b/db/db.go @@ -17,6 +17,7 @@ package db import ( "context" "sync" + "sync/atomic" blockstore "github.com/ipfs/boxo/blockstore" ds "github.com/ipfs/go-datastore" @@ -70,6 +71,9 @@ type db struct { // The options used to init the database options any + + // The ID of the last transaction created. + previousTxnID atomic.Uint64 } // Functional option type. 
@@ -138,7 +142,7 @@ func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option // lensPoolSize may be set by `options`, and because they are funcs on db // we have to mutate `db` here to set the registry. - db.lensRegistry = lens.NewRegistry(db.lensPoolSize) + db.lensRegistry = lens.NewRegistry(db.lensPoolSize, db) err = db.initialize(ctx) if err != nil { @@ -150,19 +154,22 @@ func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option // NewTxn creates a new transaction. func (db *db) NewTxn(ctx context.Context, readonly bool) (datastore.Txn, error) { - return datastore.NewTxnFrom(ctx, db.rootstore, readonly) + txnId := db.previousTxnID.Add(1) + return datastore.NewTxnFrom(ctx, db.rootstore, txnId, readonly) } // NewConcurrentTxn creates a new transaction that supports concurrent API calls. func (db *db) NewConcurrentTxn(ctx context.Context, readonly bool) (datastore.Txn, error) { - return datastore.NewConcurrentTxnFrom(ctx, db.rootstore, readonly) + txnId := db.previousTxnID.Add(1) + return datastore.NewConcurrentTxnFrom(ctx, db.rootstore, txnId, readonly) } // WithTxn returns a new [client.Store] that respects the given transaction. 
func (db *db) WithTxn(txn datastore.Txn) client.Store { return &explicitTxnDB{ - db: db, - txn: txn, + db: db, + txn: txn, + lensRegistry: db.lensRegistry.WithTxn(txn), } } @@ -210,7 +217,7 @@ func (db *db) initialize(ctx context.Context) error { return err } - err = db.lensRegistry.ReloadLenses(ctx, txn) + err = db.lensRegistry.ReloadLenses(ctx) if err != nil { return err } diff --git a/db/db_test.go b/db/db_test.go index 22bd881abe..c1a9648f36 100644 --- a/db/db_test.go +++ b/db/db_test.go @@ -14,14 +14,14 @@ import ( "context" "testing" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" dag "github.com/ipfs/boxo/ipld/merkledag" "github.com/stretchr/testify/assert" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" corecrdt "github.com/sourcenetwork/defradb/core/crdt" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/merkle/clock" ) diff --git a/db/errors.go b/db/errors.go index e5b55dcf1a..4a456cd41a 100644 --- a/db/errors.go +++ b/db/errors.go @@ -16,62 +16,75 @@ import ( ) const ( - errFailedToGetHeads string = "failed to get document heads" - errFailedToCreateCollectionQuery string = "failed to create collection prefix query" - errFailedToGetCollection string = "failed to get collection" - errFailedToGetAllCollections string = "failed to get all collections" - errDocVerification string = "the document verification failed" - errAddingP2PCollection string = "cannot add collection ID" - errRemovingP2PCollection string = "cannot remove collection ID" - errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" - errCollectionIDDoesntMatch string = "CollectionID does not match existing" - errSchemaIDDoesntMatch string = "SchemaID does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" - 
errCannotSetVersionID string = "setting the VersionID is not supported. It is updated automatically" - errCannotSetFieldID string = "explicitly setting a field ID value is not supported" - errCannotAddRelationalField string = "the adding of new relation fields is not yet supported" - errDuplicateField string = "duplicate field" - errCannotMutateField string = "mutating an existing field is not supported" - errCannotMoveField string = "moving fields is not currently supported" - errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" - errCannotDeleteField string = "deleting an existing field is not supported" - errFieldKindNotFound string = "no type found for given name" - errDocumentAlreadyExists string = "a document with the given dockey already exists" - errDocumentDeleted string = "a document with the given dockey has been deleted" - errIndexMissingFields string = "index missing fields" - errNonZeroIndexIDProvided string = "non-zero index ID provided" - errIndexFieldMissingName string = "index field missing name" - errIndexFieldMissingDirection string = "index field missing direction" - errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" - errIndexWithNameAlreadyExists string = "index with name already exists" - errInvalidStoredIndex string = "invalid stored index" - errInvalidStoredIndexKey string = "invalid stored index key" - errNonExistingFieldForIndex string = "creating an index on a non-existing property" - errCollectionDoesntExisting string = "collection with given name doesn't exist" - errFailedToStoreIndexedField string = "failed to store indexed field" - errFailedToReadStoredIndexDesc string = "failed to read stored index description" - errCanNotDeleteIndexedField string = "can not delete indexed field" - errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" - errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" - 
errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" - errIndexWithNameDoesNotExists string = "index with name doesn't exists" - errInvalidFieldValue string = "invalid field value" - errUnsupportedIndexFieldType string = "unsupported index field type" - errIndexDescriptionHasNoFields string = "index description has no fields" - errIndexDescHasNonExistingField string = "index description has non existing field" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" - errCreateFile string = "failed to create file" - errOpenFile string = "failed to open file" - errCloseFile string = "failed to close file" - errRemoveFile string = "failed to remove file" - errFailedToReadByte string = "failed to read byte" - errFailedToWriteString string = "failed to write string" - errJSONDecode string = "failed to decode JSON" - errDocFromMap string = "failed to create a new doc from map" - errDocCreate string = "failed to save a new doc to collection" - errDocUpdate string = "failed to update doc to collection" - errExpectedJSONObject string = "expected JSON object" - errExpectedJSONArray string = "expected JSON array" + errFailedToGetHeads string = "failed to get document heads" + errFailedToCreateCollectionQuery string = "failed to create collection prefix query" + errFailedToGetCollection string = "failed to get collection" + errFailedToGetAllCollections string = "failed to get all collections" + errDocVerification string = "the document verification failed" + errAddingP2PCollection string = "cannot add collection ID" + errRemovingP2PCollection string = "cannot remove collection ID" + errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" + errCollectionIDDoesntMatch string = "CollectionID does not match existing" + errSchemaIDDoesntMatch string = "SchemaID does not match existing" + errCannotModifySchemaName string = "modifying the schema name is not supported" + 
errCannotSetVersionID string = "setting the VersionID is not supported. It is updated automatically" + errCannotSetFieldID string = "explicitly setting a field ID value is not supported" + errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" + errRelationalFieldInvalidRelationType string = "invalid RelationType" + errRelationalFieldMissingIDField string = "missing id field for relation object field" + errRelationalFieldMissingRelationName string = "missing relation name" + errPrimarySideNotDefined string = "primary side of relation not defined" + errPrimarySideOnMany string = "cannot set the many side of a relation as primary" + errBothSidesPrimary string = "both sides of a relation cannot be primary" + errRelatedFieldKindMismatch string = "invalid Kind of the related field" + errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" + errRelationalFieldIDInvalidType string = "relational id field of invalid kind" + errDuplicateField string = "duplicate field" + errCannotMutateField string = "mutating an existing field is not supported" + errCannotMoveField string = "moving fields is not currently supported" + errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" + errCannotDeleteField string = "deleting an existing field is not supported" + errFieldKindNotFound string = "no type found for given name" + errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" + errSchemaNotFound string = "no schema found for given name" + errDocumentAlreadyExists string = "a document with the given dockey already exists" + errDocumentDeleted string = "a document with the given dockey has been deleted" + errIndexMissingFields string = "index missing fields" + errNonZeroIndexIDProvided string = "non-zero index ID provided" + errIndexFieldMissingName string = "index field missing name" + errIndexFieldMissingDirection string = 
"index field missing direction" + errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" + errIndexWithNameAlreadyExists string = "index with name already exists" + errInvalidStoredIndex string = "invalid stored index" + errInvalidStoredIndexKey string = "invalid stored index key" + errNonExistingFieldForIndex string = "creating an index on a non-existing property" + errCollectionDoesntExisting string = "collection with given name doesn't exist" + errFailedToStoreIndexedField string = "failed to store indexed field" + errFailedToReadStoredIndexDesc string = "failed to read stored index description" + errCanNotDeleteIndexedField string = "can not delete indexed field" + errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" + errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" + errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" + errIndexWithNameDoesNotExists string = "index with name doesn't exists" + errInvalidFieldValue string = "invalid field value" + errUnsupportedIndexFieldType string = "unsupported index field type" + errIndexDescriptionHasNoFields string = "index description has no fields" + errIndexDescHasNonExistingField string = "index description has non existing field" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errCreateFile string = "failed to create file" + errOpenFile string = "failed to open file" + errCloseFile string = "failed to close file" + errRemoveFile string = "failed to remove file" + errFailedToReadByte string = "failed to read byte" + errFailedToWriteString string = "failed to write string" + errJSONDecode string = "failed to decode JSON" + errDocFromMap string = "failed to create a new doc from map" + errDocCreate string = "failed to save a new doc to collection" + errDocUpdate string = "failed to update doc to collection" + errExpectedJSONObject string = 
"expected JSON object" + errExpectedJSONArray string = "expected JSON array" + errOneOneAlreadyLinked string = "target document is already linked to another document" + errIndexDoesNotMatchName string = "the index used does not match the given name" ) var ( @@ -90,53 +103,66 @@ var ( ErrInvalidMergeValueType = errors.New( "the type of value in the merge patch doesn't match the schema", ) - ErrMissingDocFieldToUpdate = errors.New("missing document field to update") - ErrDocMissingKey = errors.New("document is missing key") - ErrInvalidFilter = errors.New("invalid filter") - ErrInvalidOpPath = errors.New("invalid patch op path") - ErrDocumentAlreadyExists = errors.New(errDocumentAlreadyExists) - ErrDocumentDeleted = errors.New(errDocumentDeleted) - ErrUnknownCRDTArgument = errors.New("invalid CRDT arguments") - ErrUnknownCRDT = errors.New("unknown crdt") - ErrSchemaFirstFieldDocKey = errors.New("collection schema first field must be a DocKey") - ErrCollectionAlreadyExists = errors.New("collection already exists") - ErrCollectionNameEmpty = errors.New("collection name can't be empty") - ErrSchemaIDEmpty = errors.New("schema ID can't be empty") - ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") - ErrKeyEmpty = errors.New("key cannot be empty") - ErrAddingP2PCollection = errors.New(errAddingP2PCollection) - ErrRemovingP2PCollection = errors.New(errRemovingP2PCollection) - ErrAddCollectionWithPatch = errors.New(errAddCollectionWithPatch) - ErrCollectionIDDoesntMatch = errors.New(errCollectionIDDoesntMatch) - ErrSchemaIDDoesntMatch = errors.New(errSchemaIDDoesntMatch) - ErrCannotModifySchemaName = errors.New(errCannotModifySchemaName) - ErrCannotSetVersionID = errors.New(errCannotSetVersionID) - ErrCannotSetFieldID = errors.New(errCannotSetFieldID) - ErrCannotAddRelationalField = errors.New(errCannotAddRelationalField) - ErrDuplicateField = errors.New(errDuplicateField) - ErrCannotMutateField = errors.New(errCannotMutateField) - ErrCannotMoveField 
= errors.New(errCannotMoveField) - ErrInvalidCRDTType = errors.New(errInvalidCRDTType) - ErrCannotDeleteField = errors.New(errCannotDeleteField) - ErrFieldKindNotFound = errors.New(errFieldKindNotFound) - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) - ErrIndexFieldMissingDirection = errors.New(errIndexFieldMissingDirection) - ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) - ErrCanNotChangeIndexWithPatch = errors.New(errCanNotChangeIndexWithPatch) - ErrFieldOrAliasToFieldNotExist = errors.New(errFieldOrAliasToFieldNotExist) - ErrCreateFile = errors.New(errCreateFile) - ErrOpenFile = errors.New(errOpenFile) - ErrCloseFile = errors.New(errCloseFile) - ErrRemoveFile = errors.New(errRemoveFile) - ErrFailedToReadByte = errors.New(errFailedToReadByte) - ErrFailedToWriteString = errors.New(errFailedToWriteString) - ErrJSONDecode = errors.New(errJSONDecode) - ErrDocFromMap = errors.New(errDocFromMap) - ErrDocCreate = errors.New(errDocCreate) - ErrDocUpdate = errors.New(errDocUpdate) - ErrExpectedJSONObject = errors.New(errExpectedJSONObject) - ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrMissingDocFieldToUpdate = errors.New("missing document field to update") + ErrDocMissingKey = errors.New("document is missing key") + ErrInvalidFilter = errors.New("invalid filter") + ErrInvalidOpPath = errors.New("invalid patch op path") + ErrDocumentAlreadyExists = errors.New(errDocumentAlreadyExists) + ErrDocumentDeleted = errors.New(errDocumentDeleted) + ErrUnknownCRDTArgument = errors.New("invalid CRDT arguments") + ErrUnknownCRDT = errors.New("unknown crdt") + ErrSchemaFirstFieldDocKey = errors.New("collection schema first field must be a DocKey") + ErrCollectionAlreadyExists = errors.New("collection already exists") + ErrCollectionNameEmpty = errors.New("collection name can't be empty") + ErrSchemaIDEmpty = errors.New("schema ID can't be empty") + 
ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") + ErrKeyEmpty = errors.New("key cannot be empty") + ErrAddingP2PCollection = errors.New(errAddingP2PCollection) + ErrRemovingP2PCollection = errors.New(errRemovingP2PCollection) + ErrAddCollectionWithPatch = errors.New(errAddCollectionWithPatch) + ErrCollectionIDDoesntMatch = errors.New(errCollectionIDDoesntMatch) + ErrSchemaIDDoesntMatch = errors.New(errSchemaIDDoesntMatch) + ErrCannotModifySchemaName = errors.New(errCannotModifySchemaName) + ErrCannotSetVersionID = errors.New(errCannotSetVersionID) + ErrCannotSetFieldID = errors.New(errCannotSetFieldID) + ErrRelationalFieldMissingSchema = errors.New(errRelationalFieldMissingSchema) + ErrRelationalFieldInvalidRelationType = errors.New(errRelationalFieldInvalidRelationType) + ErrRelationalFieldMissingIDField = errors.New(errRelationalFieldMissingIDField) + ErrRelationalFieldMissingRelationName = errors.New(errRelationalFieldMissingRelationName) + ErrPrimarySideNotDefined = errors.New(errPrimarySideNotDefined) + ErrPrimarySideOnMany = errors.New(errPrimarySideOnMany) + ErrBothSidesPrimary = errors.New(errBothSidesPrimary) + ErrRelatedFieldKindMismatch = errors.New(errRelatedFieldKindMismatch) + ErrRelatedFieldRelationTypeMismatch = errors.New(errRelatedFieldRelationTypeMismatch) + ErrRelationalFieldIDInvalidType = errors.New(errRelationalFieldIDInvalidType) + ErrDuplicateField = errors.New(errDuplicateField) + ErrCannotMutateField = errors.New(errCannotMutateField) + ErrCannotMoveField = errors.New(errCannotMoveField) + ErrInvalidCRDTType = errors.New(errInvalidCRDTType) + ErrCannotDeleteField = errors.New(errCannotDeleteField) + ErrFieldKindNotFound = errors.New(errFieldKindNotFound) + ErrFieldKindDoesNotMatchFieldSchema = errors.New(errFieldKindDoesNotMatchFieldSchema) + ErrSchemaNotFound = errors.New(errSchemaNotFound) + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexFieldMissingName = 
errors.New(errIndexFieldMissingName) + ErrIndexFieldMissingDirection = errors.New(errIndexFieldMissingDirection) + ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) + ErrCanNotChangeIndexWithPatch = errors.New(errCanNotChangeIndexWithPatch) + ErrFieldOrAliasToFieldNotExist = errors.New(errFieldOrAliasToFieldNotExist) + ErrCreateFile = errors.New(errCreateFile) + ErrOpenFile = errors.New(errOpenFile) + ErrCloseFile = errors.New(errCloseFile) + ErrRemoveFile = errors.New(errRemoveFile) + ErrFailedToReadByte = errors.New(errFailedToReadByte) + ErrFailedToWriteString = errors.New(errFailedToWriteString) + ErrJSONDecode = errors.New(errJSONDecode) + ErrDocFromMap = errors.New(errDocFromMap) + ErrDocCreate = errors.New(errDocCreate) + ErrDocUpdate = errors.New(errDocUpdate) + ErrExpectedJSONObject = errors.New(errExpectedJSONObject) + ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrOneOneAlreadyLinked = errors.New(errOneOneAlreadyLinked) + ErrIndexDoesNotMatchName = errors.New(errIndexDoesNotMatchName) ) // NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. 
@@ -276,14 +302,90 @@ func NewErrCannotSetFieldID(name string, id client.FieldID) error { ) } -func NewErrCannotAddRelationalField(name string, kind client.FieldKind) error { +func NewErrRelationalFieldMissingSchema(name string, kind client.FieldKind) error { return errors.New( - errCannotAddRelationalField, + errRelationalFieldMissingSchema, errors.NewKV("Field", name), errors.NewKV("Kind", kind), ) } +func NewErrRelationalFieldInvalidRelationType(name string, expected any, actual client.RelationType) error { + return errors.New( + errRelationalFieldInvalidRelationType, + errors.NewKV("Field", name), + errors.NewKV("Expected", expected), + errors.NewKV("Actual", actual), + ) +} + +func NewErrRelationalFieldMissingIDField(name string, expectedName string) error { + return errors.New( + errRelationalFieldMissingIDField, + errors.NewKV("Field", name), + errors.NewKV("ExpectedIDFieldName", expectedName), + ) +} + +func NewErrRelationalFieldMissingRelationName(name string) error { + return errors.New( + errRelationalFieldMissingRelationName, + errors.NewKV("Field", name), + ) +} + +func NewErrPrimarySideNotDefined(relationName string) error { + return errors.New( + errPrimarySideNotDefined, + errors.NewKV("RelationName", relationName), + ) +} + +func NewErrPrimarySideOnMany(name string) error { + return errors.New( + errPrimarySideOnMany, + errors.NewKV("Field", name), + ) +} + +func NewErrBothSidesPrimary(relationName string) error { + return errors.New( + errBothSidesPrimary, + errors.NewKV("RelationName", relationName), + ) +} + +func NewErrRelatedFieldKindMismatch(relationName string, expected client.FieldKind, actual client.FieldKind) error { + return errors.New( + errRelatedFieldKindMismatch, + errors.NewKV("RelationName", relationName), + errors.NewKV("Expected", expected), + errors.NewKV("Actual", actual), + ) +} + +func NewErrRelatedFieldRelationTypeMismatch( + relationName string, + expected client.RelationType, + actual client.RelationType, +) error { + 
return errors.New( + errRelatedFieldRelationTypeMismatch, + errors.NewKV("RelationName", relationName), + errors.NewKV("Expected", expected), + errors.NewKV("Actual", actual), + ) +} + +func NewErrRelationalFieldIDInvalidType(name string, expected, actual client.FieldKind) error { + return errors.New( + errRelationalFieldIDInvalidType, + errors.NewKV("Field", name), + errors.NewKV("Expected", expected), + errors.NewKV("Actual", actual), + ) +} + func NewErrFieldKindNotFound(kind string) error { return errors.New( errFieldKindNotFound, @@ -291,6 +393,22 @@ func NewErrFieldKindNotFound(kind string) error { ) } +func NewErrFieldKindDoesNotMatchFieldSchema(kind string, schema string) error { + return errors.New( + errFieldKindDoesNotMatchFieldSchema, + errors.NewKV("Kind", kind), + errors.NewKV("Schema", schema), + ) +} + +func NewErrSchemaNotFound(name string, schema string) error { + return errors.New( + errSchemaNotFound, + errors.NewKV("Field", name), + errors.NewKV("Schema", schema), + ) +} + func NewErrDuplicateField(name string) error { return errors.New(errDuplicateField, errors.NewKV("Name", name)) } @@ -486,3 +604,20 @@ func NewErrDocCreate(inner error) error { func NewErrDocUpdate(inner error) error { return errors.Wrap(errDocUpdate, inner) } + +func NewErrOneOneAlreadyLinked(documentId, targetId, relationName string) error { + return errors.New( + errOneOneAlreadyLinked, + errors.NewKV("DocumentID", documentId), + errors.NewKV("TargetID", targetId), + errors.NewKV("RelationName", relationName), + ) +} + +func NewErrIndexDoesNotMatchName(index, name string) error { + return errors.New( + errIndexDoesNotMatchName, + errors.NewKV("Index", index), + errors.NewKV("Name", name), + ) +} diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index ec3803a2fa..3e19eb2218 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -22,13 +22,15 @@ type EncodedDocument interface { // Key returns the key of the document Key() []byte 
SchemaVersionID() string + // Status returns the document status. + // + // For example, whether it is deleted or active. + Status() client.DocumentStatus + // Properties returns a copy of the decoded property values mapped by their field + // description. + Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) // Reset re-initializes the EncodedDocument object. Reset() - // Decode returns a properly decoded document object - Decode() (*client.Document, error) - // DecodeToDoc returns a decoded document as a - // map of field/value pairs - DecodeToDoc() (core.Doc, error) } type EPTuple []encProperty @@ -59,12 +61,11 @@ func (e encProperty) Decode() (any, error) { // @todo: Implement Encoded Document type type encodedDocument struct { - mapping *core.DocumentMapping - doc *core.Doc - - key []byte - schemaVersionID string - Properties map[client.FieldDescription]*encProperty + key []byte + schemaVersionID string + status client.DocumentStatus + properties map[client.FieldDescription]*encProperty + decodedPropertyCache map[client.FieldDescription]any // tracking bitsets // A value of 1 indicates a required field @@ -85,32 +86,36 @@ func (encdoc *encodedDocument) SchemaVersionID() string { return encdoc.schemaVersionID } +func (encdoc *encodedDocument) Status() client.DocumentStatus { + return encdoc.status +} + // Reset re-initializes the EncodedDocument object. 
func (encdoc *encodedDocument) Reset() { - encdoc.Properties = make(map[client.FieldDescription]*encProperty, 0) + encdoc.properties = make(map[client.FieldDescription]*encProperty, 0) encdoc.key = nil - if encdoc.mapping != nil { - doc := encdoc.mapping.NewDoc() - encdoc.doc = &doc - } encdoc.filterSet = nil encdoc.selectSet = nil encdoc.schemaVersionID = "" + encdoc.status = 0 + encdoc.decodedPropertyCache = nil } // Decode returns a properly decoded document object -func (encdoc *encodedDocument) Decode() (*client.Document, error) { - key, err := client.NewDocKeyFromString(string(encdoc.key)) +func Decode(encdoc EncodedDocument) (*client.Document, error) { + key, err := client.NewDocKeyFromString(string(encdoc.Key())) if err != nil { return nil, err } + doc := client.NewDocWithKey(key) - for _, prop := range encdoc.Properties { - val, err := prop.Decode() - if err != nil { - return nil, err - } - err = doc.SetAs(prop.Desc.Name, val, prop.Desc.Typ) + properties, err := encdoc.Properties(false) + if err != nil { + return nil, err + } + + for desc, val := range properties { + err = doc.SetAs(desc.Name, val, desc.Typ) if err != nil { return nil, err } @@ -118,42 +123,63 @@ func (encdoc *encodedDocument) Decode() (*client.Document, error) { doc.SchemaVersionID = encdoc.SchemaVersionID() + // client.Document tracks which fields have been set ('dirtied'), here we + // are simply decoding a clean document and the dirty flag is an artifact + // of the current client.Document interface. 
+ doc.Clean() + return doc, nil } // DecodeToDoc returns a decoded document as a // map of field/value pairs -func (encdoc *encodedDocument) DecodeToDoc() (core.Doc, error) { - return encdoc.decodeToDoc(false) -} +func DecodeToDoc(encdoc EncodedDocument, mapping *core.DocumentMapping, filter bool) (core.Doc, error) { + doc := mapping.NewDoc() + doc.SetKey(string(encdoc.Key())) -func (encdoc *encodedDocument) decodeToDocForFilter() (core.Doc, error) { - return encdoc.decodeToDoc(true) -} + properties, err := encdoc.Properties(filter) + if err != nil { + return core.Doc{}, err + } -func (encdoc *encodedDocument) decodeToDoc(filter bool) (core.Doc, error) { - if encdoc.mapping == nil { - return core.Doc{}, ErrMissingMapper + for desc, value := range properties { + doc.Fields[desc.ID] = value } - if encdoc.doc == nil { - doc := encdoc.mapping.NewDoc() - encdoc.doc = &doc + + doc.SchemaVersionID = encdoc.SchemaVersionID() + doc.Status = encdoc.Status() + + return doc, nil +} + +func (encdoc *encodedDocument) Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) { + result := map[client.FieldDescription]any{} + if encdoc.decodedPropertyCache == nil { + encdoc.decodedPropertyCache = map[client.FieldDescription]any{} } - encdoc.doc.SetKey(string(encdoc.key)) - for _, prop := range encdoc.Properties { - if encdoc.doc.Fields[prop.Desc.ID] != nil { // used cached decoded fields + + for _, prop := range encdoc.properties { + // only get filter fields if filter=true + if onlyFilterProps && !prop.IsFilter { continue } - if filter && !prop.IsFilter { // only get filter fields if filter=true + + // used cached decoded fields + cachedValue := encdoc.decodedPropertyCache[prop.Desc] + if cachedValue != nil { + result[prop.Desc] = cachedValue continue } + val, err := prop.Decode() if err != nil { - return core.Doc{}, err + return nil, err } - encdoc.doc.Fields[prop.Desc.ID] = val + + // cache value + encdoc.decodedPropertyCache[prop.Desc] = val + result[prop.Desc] 
= val } - encdoc.doc.SchemaVersionID = encdoc.SchemaVersionID() - return *encdoc.doc, nil + return result, nil } diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index 35a89c29c0..34f05d4f1d 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -62,8 +62,6 @@ type Fetcher interface { ) error Start(ctx context.Context, spans core.Spans) error FetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error) - FetchNextDecoded(ctx context.Context) (*client.Document, ExecInfo, error) - FetchNextDoc(ctx context.Context, mapping *core.DocumentMapping) ([]byte, core.Doc, ExecInfo, error) Close() error } @@ -79,8 +77,9 @@ var ( // DocumentFetcher is a utility to incrementally fetch all the documents. type DocumentFetcher struct { - col *client.CollectionDescription - reverse bool + col *client.CollectionDescription + reverse bool + deletedDocs bool txn datastore.Txn spans core.Spans @@ -111,7 +110,8 @@ type DocumentFetcher struct { // for effectiently finding the next field to seek to. 
filterSet *bitset.BitSet - doc *encodedDocument + doc *encodedDocument + mapping *core.DocumentMapping initialized bool @@ -174,7 +174,7 @@ func (df *DocumentFetcher) init( df.filter = filter df.isReadingDocument = false df.doc = new(encodedDocument) - df.doc.mapping = docMapper + df.mapping = docMapper if df.filter != nil && docMapper == nil { return ErrMissingMapper @@ -208,7 +208,7 @@ func (df *DocumentFetcher) init( } if df.filter != nil { - conditions := df.filter.ToMap(df.doc.mapping) + conditions := df.filter.ToMap(df.mapping) parsedfilterFields, err := parser.ParseFilterFieldsForDescription(conditions, df.col.Schema) if err != nil { return err @@ -246,6 +246,8 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele return client.NewErrUninitializeProperty("DocumentFetcher", "Document") } + df.deletedDocs = withDeleted + if !spans.HasValue { // no specified spans so create a prefix scan key for the entire collection start := base.MakeCollectionKey(*df.col) if withDeleted { @@ -475,6 +477,12 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { df.doc.key = []byte(kv.Key.DocKey) df.passedFilter = false df.ranFilter = false + + if df.deletedDocs { + df.doc.status = client.Deleted + } else { + df.doc.status = client.Active + } } if kv.Key.FieldId == core.DATASTORE_DOC_VERSION_FIELD_ID { @@ -514,7 +522,7 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { df.execInfo.FieldsFetched++ - df.doc.Properties[fieldDesc] = property + df.doc.properties[fieldDesc] = property return nil } @@ -522,6 +530,41 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { // FetchNext returns a raw binary encoded document. It iterates over all the relevant // keypairs from the underlying store and constructs the document. 
func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error) { + var resultExecInfo ExecInfo + + // If the deletedDocFetcher isn't nil, this means that the user requested to include the deleted documents + // in the query. To keep the active and deleted docs in lexicographic order of dockeys, we use the two distinct + // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) dockey value. + ddf := df.deletedDocFetcher + if ddf != nil { + // If we've reached the end of the deleted docs, we can skip to getting the next active docs. + if !ddf.kvEnd { + if df.kvEnd || + (df.reverse && ddf.kv.Key.DocKey > df.kv.Key.DocKey) || + (!df.reverse && ddf.kv.Key.DocKey < df.kv.Key.DocKey) { + encdoc, execInfo, err := ddf.FetchNext(ctx) + if err != nil { + return nil, ExecInfo{}, err + } + if encdoc != nil { + return encdoc, execInfo, err + } + + resultExecInfo.Add(execInfo) + } + } + } + + encdoc, execInfo, err := df.fetchNext(ctx) + if err != nil { + return nil, ExecInfo{}, err + } + resultExecInfo.Add(execInfo) + + return encdoc, resultExecInfo, err +} + +func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, ExecInfo, error) { if df.kvEnd { return nil, ExecInfo{}, nil } @@ -547,7 +590,7 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec // only run filter if we've collected all the fields // required for filtering. This is tracked by the bitsets. 
if df.filterSet.Equal(df.doc.filterSet) { - filterDoc, err := df.doc.decodeToDocForFilter() + filterDoc, err := DecodeToDoc(df.doc, df.mapping, true) if err != nil { return nil, ExecInfo{}, err } @@ -575,7 +618,7 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec if df.passedFilter { return df.doc, df.execInfo, nil } else if !df.ranFilter { // if we didn't run, run it - decodedDoc, err := df.doc.DecodeToDoc() + decodedDoc, err := DecodeToDoc(df.doc, df.mapping, false) if err != nil { return nil, ExecInfo{}, err } @@ -608,79 +651,6 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec } } -// FetchNextDecoded implements DocumentFetcher -func (df *DocumentFetcher) FetchNextDecoded(ctx context.Context) (*client.Document, ExecInfo, error) { - encdoc, execInfo, err := df.FetchNext(ctx) - if err != nil { - return nil, ExecInfo{}, err - } - if encdoc == nil { - return nil, ExecInfo{}, nil - } - - decodedDoc, err := encdoc.Decode() - if err != nil { - return nil, ExecInfo{}, err - } - - return decodedDoc, execInfo, nil -} - -// FetchNextDoc returns the next document as a core.Doc. -// The first return value is the parsed document key. -func (df *DocumentFetcher) FetchNextDoc( - ctx context.Context, - mapping *core.DocumentMapping, -) ([]byte, core.Doc, ExecInfo, error) { - var err error - var encdoc EncodedDocument - var status client.DocumentStatus - var resultExecInfo ExecInfo - - // If the deletedDocFetcher isn't nil, this means that the user requested to include the deleted documents - // in the query. To keep the active and deleted docs in lexicographic order of dockeys, we use the two distinct - // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) dockey value. - ddf := df.deletedDocFetcher - if ddf != nil { - // If we've reached the end of the deleted docs, we can skip to getting the next active docs. 
- if !ddf.kvEnd { - if df.kvEnd || - (df.reverse && ddf.kv.Key.DocKey > df.kv.Key.DocKey) || - (!df.reverse && ddf.kv.Key.DocKey < df.kv.Key.DocKey) { - var execInfo ExecInfo - encdoc, execInfo, err = ddf.FetchNext(ctx) - if err != nil { - return nil, core.Doc{}, ExecInfo{}, err - } - status = client.Deleted - resultExecInfo.Add(execInfo) - } - } - } - - // At this point id encdoc is nil, it means that the next document to be - // returned will be from the active ones. - if encdoc == nil { - var execInfo ExecInfo - encdoc, execInfo, err = df.FetchNext(ctx) - if err != nil { - return nil, core.Doc{}, ExecInfo{}, err - } - resultExecInfo.Add(execInfo) - if encdoc == nil { - return nil, core.Doc{}, resultExecInfo, err - } - status = client.Active - } - - doc, err := encdoc.DecodeToDoc() - if err != nil { - return nil, core.Doc{}, ExecInfo{}, err - } - doc.Status = status - return encdoc.Key(), doc, resultExecInfo, err -} - // Close closes the DocumentFetcher. func (df *DocumentFetcher) Close() error { if df.kvIter != nil { diff --git a/db/fetcher/mocks/EncodedDocument.go b/db/fetcher/mocks/encoded_document.go similarity index 63% rename from db/fetcher/mocks/EncodedDocument.go rename to db/fetcher/mocks/encoded_document.go index 23522ef1f2..538d32ff4d 100644 --- a/db/fetcher/mocks/EncodedDocument.go +++ b/db/fetcher/mocks/encoded_document.go @@ -1,10 +1,9 @@ -// Code generated by mockery v2.30.1. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. 
package mocks import ( client "github.com/sourcenetwork/defradb/client" - core "github.com/sourcenetwork/defradb/core" mock "github.com/stretchr/testify/mock" ) @@ -22,76 +21,68 @@ func (_m *EncodedDocument) EXPECT() *EncodedDocument_Expecter { return &EncodedDocument_Expecter{mock: &_m.Mock} } -// Decode provides a mock function with given fields: -func (_m *EncodedDocument) Decode() (*client.Document, error) { +// Key provides a mock function with given fields: +func (_m *EncodedDocument) Key() []byte { ret := _m.Called() - var r0 *client.Document - var r1 error - if rf, ok := ret.Get(0).(func() (*client.Document, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() *client.Document); ok { + var r0 []byte + if rf, ok := ret.Get(0).(func() []byte); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*client.Document) + r0 = ret.Get(0).([]byte) } } - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 + return r0 } -// EncodedDocument_Decode_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Decode' -type EncodedDocument_Decode_Call struct { +// EncodedDocument_Key_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Key' +type EncodedDocument_Key_Call struct { *mock.Call } -// Decode is a helper method to define mock.On call -func (_e *EncodedDocument_Expecter) Decode() *EncodedDocument_Decode_Call { - return &EncodedDocument_Decode_Call{Call: _e.mock.On("Decode")} +// Key is a helper method to define mock.On call +func (_e *EncodedDocument_Expecter) Key() *EncodedDocument_Key_Call { + return &EncodedDocument_Key_Call{Call: _e.mock.On("Key")} } -func (_c *EncodedDocument_Decode_Call) Run(run func()) *EncodedDocument_Decode_Call { +func (_c *EncodedDocument_Key_Call) Run(run func()) *EncodedDocument_Key_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } -func (_c 
*EncodedDocument_Decode_Call) Return(_a0 *client.Document, _a1 error) *EncodedDocument_Decode_Call { - _c.Call.Return(_a0, _a1) +func (_c *EncodedDocument_Key_Call) Return(_a0 []byte) *EncodedDocument_Key_Call { + _c.Call.Return(_a0) return _c } -func (_c *EncodedDocument_Decode_Call) RunAndReturn(run func() (*client.Document, error)) *EncodedDocument_Decode_Call { +func (_c *EncodedDocument_Key_Call) RunAndReturn(run func() []byte) *EncodedDocument_Key_Call { _c.Call.Return(run) return _c } -// DecodeToDoc provides a mock function with given fields: -func (_m *EncodedDocument) DecodeToDoc() (core.Doc, error) { - ret := _m.Called() +// Properties provides a mock function with given fields: onlyFilterProps +func (_m *EncodedDocument) Properties(onlyFilterProps bool) (map[client.FieldDescription]interface{}, error) { + ret := _m.Called(onlyFilterProps) - var r0 core.Doc + var r0 map[client.FieldDescription]interface{} var r1 error - if rf, ok := ret.Get(0).(func() (core.Doc, error)); ok { - return rf() + if rf, ok := ret.Get(0).(func(bool) (map[client.FieldDescription]interface{}, error)); ok { + return rf(onlyFilterProps) } - if rf, ok := ret.Get(0).(func() core.Doc); ok { - r0 = rf() + if rf, ok := ret.Get(0).(func(bool) map[client.FieldDescription]interface{}); ok { + r0 = rf(onlyFilterProps) } else { - r0 = ret.Get(0).(core.Doc) + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[client.FieldDescription]interface{}) + } } - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() + if rf, ok := ret.Get(1).(func(bool) error); ok { + r1 = rf(onlyFilterProps) } else { r1 = ret.Error(1) } @@ -99,72 +90,30 @@ func (_m *EncodedDocument) DecodeToDoc() (core.Doc, error) { return r0, r1 } -// EncodedDocument_DecodeToDoc_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DecodeToDoc' -type EncodedDocument_DecodeToDoc_Call struct { +// EncodedDocument_Properties_Call is a *mock.Call that shadows Run/Return methods with type explicit 
version for method 'Properties' +type EncodedDocument_Properties_Call struct { *mock.Call } -// DecodeToDoc is a helper method to define mock.On call -func (_e *EncodedDocument_Expecter) DecodeToDoc() *EncodedDocument_DecodeToDoc_Call { - return &EncodedDocument_DecodeToDoc_Call{Call: _e.mock.On("DecodeToDoc")} +// Properties is a helper method to define mock.On call +// - onlyFilterProps bool +func (_e *EncodedDocument_Expecter) Properties(onlyFilterProps interface{}) *EncodedDocument_Properties_Call { + return &EncodedDocument_Properties_Call{Call: _e.mock.On("Properties", onlyFilterProps)} } -func (_c *EncodedDocument_DecodeToDoc_Call) Run(run func()) *EncodedDocument_DecodeToDoc_Call { +func (_c *EncodedDocument_Properties_Call) Run(run func(onlyFilterProps bool)) *EncodedDocument_Properties_Call { _c.Call.Run(func(args mock.Arguments) { - run() + run(args[0].(bool)) }) return _c } -func (_c *EncodedDocument_DecodeToDoc_Call) Return(_a0 core.Doc, _a1 error) *EncodedDocument_DecodeToDoc_Call { +func (_c *EncodedDocument_Properties_Call) Return(_a0 map[client.FieldDescription]interface{}, _a1 error) *EncodedDocument_Properties_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *EncodedDocument_DecodeToDoc_Call) RunAndReturn(run func() (core.Doc, error)) *EncodedDocument_DecodeToDoc_Call { - _c.Call.Return(run) - return _c -} - -// Key provides a mock function with given fields: -func (_m *EncodedDocument) Key() []byte { - ret := _m.Called() - - var r0 []byte - if rf, ok := ret.Get(0).(func() []byte); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - return r0 -} - -// EncodedDocument_Key_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Key' -type EncodedDocument_Key_Call struct { - *mock.Call -} - -// Key is a helper method to define mock.On call -func (_e *EncodedDocument_Expecter) Key() *EncodedDocument_Key_Call { - return &EncodedDocument_Key_Call{Call: _e.mock.On("Key")} 
-} - -func (_c *EncodedDocument_Key_Call) Run(run func()) *EncodedDocument_Key_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *EncodedDocument_Key_Call) Return(_a0 []byte) *EncodedDocument_Key_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *EncodedDocument_Key_Call) RunAndReturn(run func() []byte) *EncodedDocument_Key_Call { +func (_c *EncodedDocument_Properties_Call) RunAndReturn(run func(bool) (map[client.FieldDescription]interface{}, error)) *EncodedDocument_Properties_Call { _c.Call.Return(run) return _c } @@ -242,6 +191,47 @@ func (_c *EncodedDocument_SchemaVersionID_Call) RunAndReturn(run func() string) return _c } +// Status provides a mock function with given fields: +func (_m *EncodedDocument) Status() client.DocumentStatus { + ret := _m.Called() + + var r0 client.DocumentStatus + if rf, ok := ret.Get(0).(func() client.DocumentStatus); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(client.DocumentStatus) + } + + return r0 +} + +// EncodedDocument_Status_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Status' +type EncodedDocument_Status_Call struct { + *mock.Call +} + +// Status is a helper method to define mock.On call +func (_e *EncodedDocument_Expecter) Status() *EncodedDocument_Status_Call { + return &EncodedDocument_Status_Call{Call: _e.mock.On("Status")} +} + +func (_c *EncodedDocument_Status_Call) Run(run func()) *EncodedDocument_Status_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *EncodedDocument_Status_Call) Return(_a0 client.DocumentStatus) *EncodedDocument_Status_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *EncodedDocument_Status_Call) RunAndReturn(run func() client.DocumentStatus) *EncodedDocument_Status_Call { + _c.Call.Return(run) + return _c +} + // NewEncodedDocument creates a new instance of EncodedDocument. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewEncodedDocument(t interface { diff --git a/db/fetcher/mocks/Fetcher.go b/db/fetcher/mocks/fetcher.go similarity index 99% rename from db/fetcher/mocks/Fetcher.go rename to db/fetcher/mocks/fetcher.go index 39f9c89c39..12bb386024 100644 --- a/db/fetcher/mocks/Fetcher.go +++ b/db/fetcher/mocks/fetcher.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.32.0. DO NOT EDIT. +// Code generated by mockery. DO NOT EDIT. package mocks diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index 53ae6b8eaf..f1c7b6a9de 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -121,6 +121,8 @@ func (vf *VersionedFetcher) Init( vf.store, err = datastore.NewTxnFrom( ctx, vf.root, + // We can take the parent txn id here + txn.ID(), false, ) // were going to discard and nuke this later if err != nil { @@ -402,8 +404,7 @@ func (vf *VersionedFetcher) processNode( return err } - height := delta.GetPriority() - _, err = mcrdt.Clock().ProcessNode(vf.ctx, nil, nd.Cid(), height, delta, nd) + _, err = mcrdt.Clock().ProcessNode(vf.ctx, nil, delta, nd) return err } diff --git a/db/fetcher_test.go b/db/fetcher_test.go index 209fb7a8c3..e2c3647792 100644 --- a/db/fetcher_test.go +++ b/db/fetcher_test.go @@ -15,7 +15,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" @@ -183,162 +182,3 @@ func TestFetcherGetAllPrimaryIndexEncodedDocMultiple(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, encdoc) } - -func TestFetcherGetAllPrimaryIndexDecodedSingle(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(t, ctx, db) - assert.NoError(t, err) - - doc, err := 
client.NewDocFromJSON([]byte(`{ - "Name": "John", - "Age": 21 - }`)) - assert.NoError(t, err) - err = col.Save(ctx, doc) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, true) - if err != nil { - t.Error(err) - return - } - - df := new(fetcher.DocumentFetcher) - desc := col.Description() - err = df.Init(ctx, txn, &desc, desc.Schema.Fields, nil, nil, false, false) - assert.NoError(t, err) - - err = df.Start(ctx, core.Spans{}) - assert.NoError(t, err) - - ddoc, _, err := df.FetchNextDecoded(ctx) - assert.NoError(t, err) - require.NotNil(t, ddoc) - - // value check - name, err := ddoc.Get("Name") - assert.NoError(t, err) - age, err := ddoc.Get("Age") - assert.NoError(t, err) - - assert.Equal(t, "John", name) - assert.Equal(t, uint64(21), age) -} - -func TestFetcherGetAllPrimaryIndexDecodedMultiple(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(t, ctx, db) - assert.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{ - "Name": "John", - "Age": 21 - }`)) - assert.NoError(t, err) - err = col.Save(ctx, doc) - assert.NoError(t, err) - - doc, err = client.NewDocFromJSON([]byte(`{ - "Name": "Alice", - "Age": 27 - }`)) - assert.NoError(t, err) - err = col.Save(ctx, doc) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, true) - if err != nil { - t.Error(err) - return - } - - df := new(fetcher.DocumentFetcher) - desc := col.Description() - err = df.Init(ctx, txn, &desc, desc.Schema.Fields, nil, nil, false, false) - assert.NoError(t, err) - - err = df.Start(ctx, core.Spans{}) - assert.NoError(t, err) - - ddoc, _, err := df.FetchNextDecoded(ctx) - assert.NoError(t, err) - assert.NotNil(t, ddoc) - - // value check - name, err := ddoc.Get("Name") - assert.NoError(t, err) - age, err := ddoc.Get("Age") - assert.NoError(t, err) - - assert.Equal(t, "John", name) - assert.Equal(t, uint64(21), age) - - ddoc, _, err = df.FetchNextDecoded(ctx) - assert.NoError(t, err) - 
assert.NotNil(t, ddoc) - - // value check - name, err = ddoc.Get("Name") - assert.NoError(t, err) - age, err = ddoc.Get("Age") - assert.NoError(t, err) - - assert.Equal(t, "Alice", name) - assert.Equal(t, uint64(27), age) -} - -func TestFetcherGetOnePrimaryIndexDecoded(t *testing.T) { - ctx := context.Background() - db, err := newMemoryDB(ctx) - assert.NoError(t, err) - - col, err := newTestCollectionWithSchema(t, ctx, db) - assert.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{ - "Name": "John", - "Age": 21 - }`)) - assert.NoError(t, err) - err = col.Save(ctx, doc) - assert.NoError(t, err) - - txn, err := db.NewTxn(ctx, true) - if err != nil { - t.Error(err) - return - } - - df := new(fetcher.DocumentFetcher) - desc := col.Description() - err = df.Init(ctx, txn, &desc, desc.Schema.Fields, nil, nil, false, false) - assert.NoError(t, err) - - // create a span for our document we wish to find - docKey := base.MakeCollectionKey(desc).WithDocKey("bae-52b9170d-b77a-5887-b877-cbdbb99b009f") - spans := core.NewSpans( - core.NewSpan(docKey, docKey.PrefixEnd()), - ) - - err = df.Start(ctx, spans) - assert.NoError(t, err) - - ddoc, _, err := df.FetchNextDecoded(ctx) - assert.NoError(t, err) - assert.NotNil(t, ddoc) - - // value check - name, err := ddoc.Get("Name") - assert.NoError(t, err) - age, err := ddoc.Get("Age") - assert.NoError(t, err) - - assert.Equal(t, "John", name) - assert.Equal(t, uint64(21), age) -} diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 2c89d5f472..b62cb992d6 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -530,7 +530,7 @@ func TestNonUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { key2 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc2).Build() data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS()) - require.NoError(t, err) + require.NoError(t, err, key1.ToString()) assert.Len(t, data, 0) data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS()) require.NoError(t, 
err) @@ -570,8 +570,8 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t Name: "Fails to fetch next decoded", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().FetchNextDecoded(mock.Anything).Unset() - f.EXPECT().FetchNextDecoded(mock.Anything).Return(nil, fetcher.ExecInfo{}, testError) + f.EXPECT().FetchNext(mock.Anything).Unset() + f.EXPECT().FetchNext(mock.Anything).Return(nil, fetcher.ExecInfo{}, testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -581,8 +581,8 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t Name: "Fails to close", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().FetchNextDecoded(mock.Anything).Unset() - f.EXPECT().FetchNextDecoded(mock.Anything).Return(nil, fetcher.ExecInfo{}, nil) + f.EXPECT().FetchNext(mock.Anything).Unset() + f.EXPECT().FetchNext(mock.Anything).Return(nil, fetcher.ExecInfo{}, nil) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(testError) return f @@ -854,8 +854,8 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { Name: "Fails to fetch next decoded", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().FetchNextDecoded(mock.Anything).Unset() - f.EXPECT().FetchNextDecoded(mock.Anything).Return(nil, fetcher.ExecInfo{}, testError) + f.EXPECT().FetchNext(mock.Anything).Unset() + f.EXPECT().FetchNext(mock.Anything).Return(nil, fetcher.ExecInfo{}, testError) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(nil) return f @@ -865,10 +865,10 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { Name: "Fails to close", PrepareFetcher: func() fetcher.Fetcher { f := fetcherMocks.NewStubbedFetcher(t) - f.EXPECT().FetchNextDecoded(mock.Anything).Unset() + f.EXPECT().FetchNext(mock.Anything).Unset() // By default the the stubbed fetcher returns an empty, invalid document 
// here we need to make sure it reaches the Close call by overriding that default. - f.EXPECT().FetchNextDecoded(mock.Anything).Maybe().Return(nil, fetcher.ExecInfo{}, nil) + f.EXPECT().FetchNext(mock.Anything).Maybe().Return(nil, fetcher.ExecInfo{}, nil) f.EXPECT().Close().Unset() f.EXPECT().Close().Return(testError) return f @@ -994,14 +994,15 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { err := doc.Set(usersNameFieldName, "Islam") require.NoError(t, err) - // This is only required as we are using it as a return value - // in production this value will have been set by the fetcher - doc.SchemaVersionID = f.users.Schema().VersionID + encodedDoc := shimEncodedDocument{ + key: []byte(doc.Key().String()), + schemaVersionID: f.users.Schema().VersionID, + } f.users.fetcherFactory = func() fetcher.Fetcher { df := fetcherMocks.NewStubbedFetcher(t) - df.EXPECT().FetchNextDecoded(mock.Anything).Unset() - df.EXPECT().FetchNextDecoded(mock.Anything).Return(doc, fetcher.ExecInfo{}, nil) + df.EXPECT().FetchNext(mock.Anything).Unset() + df.EXPECT().FetchNext(mock.Anything).Return(&encodedDoc, fetcher.ExecInfo{}, nil) return df } @@ -1047,3 +1048,35 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) require.Error(t, err) } + +type shimEncodedDocument struct { + key []byte + schemaVersionID string + status client.DocumentStatus + properties map[client.FieldDescription]any +} + +var _ fetcher.EncodedDocument = (*shimEncodedDocument)(nil) + +func (encdoc *shimEncodedDocument) Key() []byte { + return encdoc.key +} + +func (encdoc *shimEncodedDocument) SchemaVersionID() string { + return encdoc.schemaVersionID +} + +func (encdoc *shimEncodedDocument) Status() client.DocumentStatus { + return encdoc.status +} + +func (encdoc *shimEncodedDocument) Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) { + return encdoc.properties, nil +} + +func (encdoc 
*shimEncodedDocument) Reset() { + encdoc.key = nil + encdoc.schemaVersionID = "" + encdoc.status = 0 + encdoc.properties = map[client.FieldDescription]any{} +} diff --git a/db/schema.go b/db/schema.go index e85b0b6a72..5c5c0568f8 100644 --- a/db/schema.go +++ b/db/schema.go @@ -13,14 +13,25 @@ package db import ( "context" "encoding/json" + "fmt" "strings" + "unicode" jsonpatch "github.com/evanphx/json-patch/v5" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) +const ( + schemaNamePathIndex int = 0 + schemaPathIndex int = 1 + fieldsPathIndex int = 2 + fieldIndexPathIndex int = 3 +) + // addSchema takes the provided schema in SDL format, and applies it to the database, // and creates the necessary collections, request types, etc. func (db *db) addSchema( @@ -97,13 +108,14 @@ func (db *db) patchSchema(ctx context.Context, txn datastore.Txn, patchString st if err != nil { return err } - // Here we swap out any string representations of enums for their integer values - patch, err = substituteSchemaPatch(patch) + + collectionsByName, err := db.getCollectionsByName(ctx, txn) if err != nil { return err } - collectionsByName, err := db.getCollectionsByName(ctx, txn) + // Here we swap out any string representations of enums for their integer values + patch, err = substituteSchemaPatch(patch, collectionsByName) if err != nil { return err } @@ -131,10 +143,12 @@ func (db *db) patchSchema(ctx context.Context, txn datastore.Txn, patchString st newDescriptions = append(newDescriptions, desc) } - for _, desc := range newDescriptions { - if _, err := db.updateCollection(ctx, txn, desc); err != nil { + for i, desc := range newDescriptions { + col, err := db.updateCollection(ctx, txn, collectionsByName, newDescriptionsByName, desc) + if err != nil { return err } + newDescriptions[i] = col.Description() } return db.parser.SetSchema(ctx, txn, newDescriptions) @@ -162,39 +176,96 @@ func (db *db) 
getCollectionsByName( // // For example Field [FieldKind] string representations will be replaced by the raw integer // value. -func substituteSchemaPatch(patch jsonpatch.Patch) (jsonpatch.Patch, error) { +func substituteSchemaPatch( + patch jsonpatch.Patch, + collectionsByName map[string]client.CollectionDescription, +) (jsonpatch.Patch, error) { + fieldIndexesByCollection := make(map[string]map[string]int, len(collectionsByName)) + for colName, col := range collectionsByName { + fieldIndexesByName := make(map[string]int, len(col.Schema.Fields)) + fieldIndexesByCollection[colName] = fieldIndexesByName + for i, field := range col.Schema.Fields { + fieldIndexesByName[field.Name] = i + } + } + for _, patchOperation := range patch { path, err := patchOperation.Path() if err != nil { return nil, err } + path = strings.TrimPrefix(path, "/") + splitPath := strings.Split(path, "/") + if value, hasValue := patchOperation["value"]; hasValue { - if isField(path) { + var newPatchValue immutable.Option[any] + var field map[string]any + isField := isField(splitPath) + + if isField { // We unmarshal the full field-value into a map to ensure that all user // specified properties are maintained. 
- var field map[string]any err = json.Unmarshal(*value, &field) if err != nil { return nil, err } + } - if kind, isString := field["Kind"].(string); isString { - substitute, substituteFound := client.FieldKindStringToEnumMapping[kind] - if substituteFound { - field["Kind"] = substitute - substituteField, err := json.Marshal(field) - if err != nil { - return nil, err + if isFieldOrInner(splitPath) { + fieldIndexer := splitPath[fieldIndexPathIndex] + + if containsLetter(fieldIndexer) { + if isField { + if nameValue, hasName := field["Name"]; hasName { + if name, isString := nameValue.(string); isString && name != fieldIndexer { + return nil, NewErrIndexDoesNotMatchName(fieldIndexer, name) + } + } else { + field["Name"] = fieldIndexer } + newPatchValue = immutable.Some[any](field) + } - substituteValue := json.RawMessage(substituteField) - patchOperation["value"] = &substituteValue - } else { - return nil, NewErrFieldKindNotFound(kind) + desc := collectionsByName[splitPath[schemaNamePathIndex]] + var index string + if fieldIndexesByName, ok := fieldIndexesByCollection[desc.Name]; ok { + if i, ok := fieldIndexesByName[fieldIndexer]; ok { + index = fmt.Sprint(i) + } + } + if index == "" { + index = "-" + // If this is a new field we need to track its location so that subsequent operations + // within the patch may access it by field name. 
+ fieldIndexesByCollection[desc.Name][fieldIndexer] = len(fieldIndexesByCollection[desc.Name]) } + + splitPath[fieldIndexPathIndex] = index + path = strings.Join(splitPath, "/") + opPath := json.RawMessage([]byte(fmt.Sprintf(`"/%s"`, path))) + patchOperation["path"] = &opPath } - } else if isFieldKind(path) { + } + + if isField { + if kind, isString := field["Kind"].(string); isString { + substitute, collectionName, err := getSubstituteFieldKind(kind, collectionsByName) + if err != nil { + return nil, err + } + + field["Kind"] = substitute + if collectionName != "" { + if field["Schema"] != nil && field["Schema"] != collectionName { + return nil, NewErrFieldKindDoesNotMatchFieldSchema(kind, field["Schema"].(string)) + } + field["Schema"] = collectionName + } + + newPatchValue = immutable.Some[any](field) + } + } else if isFieldKind(splitPath) { var kind any err = json.Unmarshal(*value, &kind) if err != nil { @@ -202,19 +273,23 @@ func substituteSchemaPatch(patch jsonpatch.Patch) (jsonpatch.Patch, error) { } if kind, isString := kind.(string); isString { - substitute, substituteFound := client.FieldKindStringToEnumMapping[kind] - if substituteFound { - substituteKind, err := json.Marshal(substitute) - if err != nil { - return nil, err - } - - substituteValue := json.RawMessage(substituteKind) - patchOperation["value"] = &substituteValue - } else { - return nil, NewErrFieldKindNotFound(kind) + substitute, _, err := getSubstituteFieldKind(kind, collectionsByName) + if err != nil { + return nil, err } + + newPatchValue = immutable.Some[any](substitute) + } + } + + if newPatchValue.HasValue() { + substitute, err := json.Marshal(newPatchValue.Value()) + if err != nil { + return nil, err } + + substitutedValue := json.RawMessage(substitute) + patchOperation["value"] = &substitutedValue } } } @@ -222,20 +297,61 @@ func substituteSchemaPatch(patch jsonpatch.Patch) (jsonpatch.Patch, error) { return patch, nil } -// isField returns true if the given path points to a 
FieldDescription. -func isField(path string) bool { - path = strings.TrimPrefix(path, "/") - elements := strings.Split(path, "/") +// getSubstituteFieldKind checks and attempts to get the underlying integer value for the given string +// Field Kind value. It will return the value if one is found, else returns an [ErrFieldKindNotFound]. +// +// If the value represents a foreign relation the collection name will also be returned. +func getSubstituteFieldKind( + kind string, + collectionsByName map[string]client.CollectionDescription, +) (client.FieldKind, string, error) { + substitute, substituteFound := client.FieldKindStringToEnumMapping[kind] + if substituteFound { + return substitute, "", nil + } else { + var collectionName string + var substitute client.FieldKind + if len(kind) > 0 && kind[0] == '[' && kind[len(kind)-1] == ']' { + collectionName = kind[1 : len(kind)-1] + substitute = client.FieldKind_FOREIGN_OBJECT_ARRAY + } else { + collectionName = kind + substitute = client.FieldKind_FOREIGN_OBJECT + } + + if _, substituteFound := collectionsByName[collectionName]; substituteFound { + return substitute, collectionName, nil + } + + return 0, "", NewErrFieldKindNotFound(kind) + } +} + +// isFieldOrInner returns true if the given path points to a FieldDescription or a property within it. +func isFieldOrInner(path []string) bool { //nolint:goconst - return len(elements) == 4 && elements[len(elements)-2] == "Fields" && elements[len(elements)-3] == "Schema" + return len(path) >= 4 && path[fieldsPathIndex] == "Fields" && path[schemaPathIndex] == "Schema" +} + +// isField returns true if the given path points to a FieldDescription. +func isField(path []string) bool { + return len(path) == 4 && path[fieldsPathIndex] == "Fields" && path[schemaPathIndex] == "Schema" } // isField returns true if the given path points to a FieldDescription.Kind property. 
-func isFieldKind(path string) bool { - path = strings.TrimPrefix(path, "/") - elements := strings.Split(path, "/") - return len(elements) == 5 && - elements[len(elements)-1] == "Kind" && - elements[len(elements)-3] == "Fields" && - elements[len(elements)-4] == "Schema" +func isFieldKind(path []string) bool { + return len(path) == 5 && + path[fieldIndexPathIndex+1] == "Kind" && + path[fieldsPathIndex] == "Fields" && + path[schemaPathIndex] == "Schema" +} + +// containsLetter returns true if the string contains a single unicode character. +func containsLetter(s string) bool { + for _, r := range s { + if unicode.IsLetter(r) { + return true + } + } + return false } diff --git a/db/txn_db.go b/db/txn_db.go index a7096a46a7..b307d96e35 100644 --- a/db/txn_db.go +++ b/db/txn_db.go @@ -28,7 +28,8 @@ type implicitTxnDB struct { type explicitTxnDB struct { *db - txn datastore.Txn + txn datastore.Txn + lensRegistry client.LensRegistry } // ExecRequest executes a request against the database. @@ -286,7 +287,7 @@ func (db *implicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig } defer txn.Discard(ctx) - err = db.lensRegistry.SetMigration(ctx, txn, cfg) + err = db.lensRegistry.SetMigration(ctx, cfg) if err != nil { return err } @@ -295,7 +296,7 @@ func (db *implicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig } func (db *explicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig) error { - return db.lensRegistry.SetMigration(ctx, db.txn, cfg) + return db.lensRegistry.SetMigration(ctx, cfg) } // SetReplicator adds a new replicator to the database. @@ -417,3 +418,10 @@ func (db *implicitTxnDB) BasicExport(ctx context.Context, config *client.BackupC func (db *explicitTxnDB) BasicExport(ctx context.Context, config *client.BackupConfig) error { return db.basicExport(ctx, db.txn, config) } + +// LensRegistry returns the LensRegistry in use by this database instance. 
+// +// It exposes several useful thread-safe migration related functions. +func (db *explicitTxnDB) LensRegistry() client.LensRegistry { + return db.lensRegistry +} diff --git a/docs/data_format_changes/i1851-no-change-tests-updated.md b/docs/data_format_changes/i1851-no-change-tests-updated.md new file mode 100644 index 0000000000..7e3f8ef75e --- /dev/null +++ b/docs/data_format_changes/i1851-no-change-tests-updated.md @@ -0,0 +1,3 @@ +# Make existing mutation tests use mutation test system + +This is not a breaking change, tests were changed from using gql requests to CreateDoc and UpdateDoc actions, meaning the point at which the change detector split setup/assert shifted. diff --git a/errors/defraError.go b/errors/defraError.go index 7ebe4355de..2281add30e 100644 --- a/errors/defraError.go +++ b/errors/defraError.go @@ -58,7 +58,7 @@ func (e *defraError) Error() string { } func (e *defraError) Is(other error) bool { - switch otherTyped := other.(type) { //nolint:errorlint + switch otherTyped := other.(type) { case *defraError: return e.message == otherTyped.message default: diff --git a/go.mod b/go.mod index d935137715..4af3d59cb4 100644 --- a/go.mod +++ b/go.mod @@ -5,28 +5,28 @@ go 1.20 require ( github.com/bits-and-blooms/bitset v1.8.0 github.com/bxcodec/faker v2.0.1+incompatible - github.com/dgraph-io/badger/v3 v3.2103.5 + github.com/dgraph-io/badger/v4 v4.1.0 github.com/evanphx/json-patch/v5 v5.6.0 - github.com/fxamacker/cbor/v2 v2.4.0 + github.com/fxamacker/cbor/v2 v2.5.0 github.com/go-chi/chi/v5 v5.0.10 github.com/go-chi/cors v1.2.1 - github.com/go-errors/errors v1.4.2 + github.com/go-errors/errors v1.5.0 github.com/gofrs/uuid/v5 v5.0.0 github.com/graphql-go/graphql v0.8.1 github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.10.2 - github.com/ipfs/go-block-format v0.1.2 + github.com/ipfs/boxo v0.12.0 + github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 
github.com/ipfs/go-datastore v0.6.0 - github.com/ipfs/go-ipld-format v0.5.0 + github.com/ipfs/go-ipld-format v0.6.0 github.com/ipfs/go-log v1.0.5 github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 - github.com/libp2p/go-libp2p v0.28.0 + github.com/libp2p/go-libp2p v0.29.2 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.24.2 + github.com/libp2p/go-libp2p-kad-dht v0.23.0 github.com/libp2p/go-libp2p-pubsub v0.9.3 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mitchellh/mapstructure v1.5.0 @@ -34,7 +34,7 @@ require ( github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/pkg/errors v0.9.1 - github.com/planetscale/vtprotobuf v0.4.0 + github.com/planetscale/vtprotobuf v0.5.0 github.com/sourcenetwork/immutable v0.3.0 github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 @@ -44,19 +44,19 @@ require ( github.com/tidwall/btree v1.6.0 github.com/ugorji/go/codec v1.2.11 github.com/valyala/fastjson v1.6.4 - go.opentelemetry.io/otel/metric v1.16.0 - go.opentelemetry.io/otel/sdk/metric v0.39.0 - go.uber.org/zap v1.24.0 - golang.org/x/crypto v0.11.0 - golang.org/x/net v0.12.0 - google.golang.org/grpc v1.56.2 + github.com/vito/go-sse v1.0.0 + go.opentelemetry.io/otel/metric v1.18.0 + go.opentelemetry.io/otel/sdk/metric v0.40.0 + go.uber.org/zap v1.25.0 + golang.org/x/crypto v0.13.0 + golang.org/x/net v0.14.0 + google.golang.org/grpc v1.58.0 google.golang.org/protobuf v1.31.0 ) require ( github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect @@ -84,7 +84,7 @@ require ( github.com/golang/snappy v0.0.4 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect 
github.com/google/gopacket v1.1.19 // indirect - github.com/google/pprof v0.0.0-20230602150820-91b7bce49751 // indirect + github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 // indirect github.com/google/uuid v1.3.0 // indirect github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect @@ -99,6 +99,7 @@ require ( github.com/ipfs/go-bitswap v0.12.0 // indirect github.com/ipfs/go-blockservice v0.5.1 // indirect github.com/ipfs/go-fetcher v1.6.1 // indirect + github.com/ipfs/go-ipfs-blockstore v1.2.0 // indirect github.com/ipfs/go-ipfs-config v0.19.0 // indirect github.com/ipfs/go-ipfs-delay v0.0.1 // indirect github.com/ipfs/go-ipfs-exchange-interface v0.2.0 // indirect @@ -108,16 +109,17 @@ require ( github.com/ipfs/go-ipfs-util v0.0.2 // indirect github.com/ipfs/go-ipld-cbor v0.0.6 // indirect github.com/ipfs/go-ipld-legacy v0.2.1 // indirect + github.com/ipfs/go-ipns v0.3.0 // indirect github.com/ipfs/go-libipfs v0.7.0 // indirect github.com/ipfs/go-merkledag v0.9.0 // indirect github.com/ipfs/go-metrics-interface v0.0.1 // indirect github.com/ipfs/go-peertaskqueue v0.8.1 // indirect github.com/ipfs/interface-go-ipfs-core v0.10.0 // indirect github.com/ipld/go-codec-dagpb v1.6.0 // indirect - github.com/ipld/go-ipld-prime v0.20.0 // indirect + github.com/ipld/go-ipld-prime v0.21.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect - github.com/klauspost/compress v1.16.5 // indirect + github.com/klauspost/compress v1.16.7 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/koron/go-ssdp v0.0.4 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect @@ -126,18 +128,18 @@ require ( github.com/libp2p/go-libp2p-asn-util v0.3.0 // indirect github.com/libp2p/go-libp2p-connmgr v0.4.0 // indirect github.com/libp2p/go-libp2p-core v0.20.0 // indirect - github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect + 
github.com/libp2p/go-libp2p-kbucket v0.6.0 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.0 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-nat v0.2.0 // indirect github.com/libp2p/go-netroute v0.2.1 // indirect github.com/libp2p/go-reuseport v0.3.0 // indirect - github.com/libp2p/go-yamux/v4 v4.0.0 // indirect + github.com/libp2p/go-yamux/v4 v4.0.1 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect github.com/mattn/go-isatty v0.0.19 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/miekg/dns v1.1.54 // indirect + github.com/miekg/dns v1.1.55 // indirect github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect github.com/minio/sha256-simd v1.0.1 // indirect @@ -149,7 +151,7 @@ require ( github.com/multiformats/go-multicodec v0.9.0 // indirect github.com/multiformats/go-multistream v0.4.1 // indirect github.com/multiformats/go-varint v0.0.7 // indirect - github.com/onsi/ginkgo/v2 v2.9.7 // indirect + github.com/onsi/ginkgo/v2 v2.11.0 // indirect github.com/opencontainers/runtime-spec v1.0.2 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect @@ -161,9 +163,9 @@ require ( github.com/prometheus/common v0.42.0 // indirect github.com/prometheus/procfs v0.9.0 // indirect github.com/quic-go/qpack v0.4.0 // indirect - github.com/quic-go/qtls-go1-19 v0.3.2 // indirect - github.com/quic-go/qtls-go1-20 v0.2.2 // indirect - github.com/quic-go/quic-go v0.33.0 // indirect + github.com/quic-go/qtls-go1-19 v0.3.3 // indirect + github.com/quic-go/qtls-go1-20 v0.2.3 // indirect + github.com/quic-go/quic-go v0.36.4 // indirect github.com/quic-go/webtransport-go v0.5.3 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect 
github.com/russross/blackfriday/v2 v2.1.0 // indirect @@ -179,22 +181,21 @@ require ( github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.16.0 // indirect - go.opentelemetry.io/otel/sdk v1.16.0 // indirect - go.opentelemetry.io/otel/trace v1.16.0 // indirect - go.uber.org/atomic v1.11.0 // indirect + go.opentelemetry.io/otel v1.18.0 // indirect + go.opentelemetry.io/otel/sdk v1.17.0 // indirect + go.opentelemetry.io/otel/trace v1.18.0 // indirect go.uber.org/dig v1.17.0 // indirect - go.uber.org/fx v1.19.2 // indirect + go.uber.org/fx v1.20.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect - golang.org/x/mod v0.11.0 // indirect - golang.org/x/sync v0.2.0 // indirect - golang.org/x/sys v0.10.0 // indirect - golang.org/x/text v0.11.0 // indirect - golang.org/x/tools v0.9.1 // indirect + golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 // indirect + golang.org/x/mod v0.12.0 // indirect + golang.org/x/sync v0.3.0 // indirect + golang.org/x/sys v0.12.0 // indirect + golang.org/x/text v0.13.0 // indirect + golang.org/x/tools v0.11.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect gonum.org/v1/gonum v0.13.0 // indirect - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect @@ -202,6 +203,7 @@ require ( // SourceNetwork fork og graphql-go replace ( + github.com/dgraph-io/badger/v4 => github.com/sourcenetwork/badger/v4 v4.0.0-20230801145501-d3a57bd4c2ec github.com/graphql-go/graphql => github.com/sourcenetwork/graphql-go v0.7.10-0.20230511091704-fe7085512c23 github.com/textileio/go-libp2p-pubsub-rpc => 
github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.0-20230209220544-e16d5e34c4fc ) diff --git a/go.sum b/go.sum index 2fdb78dca4..9cc9fad3fd 100644 --- a/go.sum +++ b/go.sum @@ -48,9 +48,9 @@ github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOv github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Kubuxu/go-os-helper v0.0.1/go.mod h1:N8B+I7vPCT80IcP58r50u4+gEEcsZETFUpAzWW2ep1Y= -github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= @@ -168,8 +168,7 @@ github.com/dgraph-io/badger v1.6.0-rc1/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhY github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= github.com/dgraph-io/badger v1.6.1/go.mod h1:FRmFw3uxvcpa8zG3Rxs0th+hCLIuaQg8HlNV5bjgnuU= github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= -github.com/dgraph-io/badger/v3 v3.2103.5 h1:ylPa6qzbjYRQMU6jokoj4wzcaweHylt//CH0AKt0akg= -github.com/dgraph-io/badger/v3 v3.2103.5/go.mod h1:4MPiseMeDQ3FNCYwRbbcBOGJLf5jsE0PPFzRiKjtcdw= +github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g= github.com/dgraph-io/ristretto v0.0.2/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= 
github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= @@ -209,13 +208,13 @@ github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiD github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= -github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= -github.com/fxamacker/cbor/v2 v2.4.0 h1:ri0ArlOR+5XunOP8CRUowT0pSJOwhW098ZCUyskZD88= -github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= +github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE= +github.com/fxamacker/cbor/v2 v2.5.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= @@ -224,8 +223,8 @@ github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNIT github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= github.com/go-chi/cors v1.2.1/go.mod 
h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= -github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= -github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-errors/errors v1.5.0 h1:/EuijeGOu7ckFxzhkj4CXJ8JaenxK7bKUxpPYqeLHqQ= +github.com/go-errors/errors v1.5.0/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -299,12 +298,10 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/flatbuffers v1.12.1/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= 
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -338,8 +335,8 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20230602150820-91b7bce49751 h1:hR7/MlvK23p6+lIw9SN1TigNLn9ZnF3W4SYRKq2gAHs= -github.com/google/pprof v0.0.0-20230602150820-91b7bce49751/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA= +github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 h1:n6vlPhxsA+BW/XsS5+uqi7GyzaLa5MH7qlSLBZtRdiA= +github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -423,8 +420,8 @@ github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod github.com/ipfs/bbloom v0.0.1/go.mod h1:oqo8CVWsJFMOZqTglBG4wydCE4IQA/G2/SEofB0rjUI= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.10.2 h1:kspw9HmMyKzLQxpKk417sF69i6iuf50AXtRjFqCYyL4= -github.com/ipfs/boxo v0.10.2/go.mod h1:1qgKq45mPRCxf4ZPoJV2lnXxyxucigILMJOrQrVivv8= +github.com/ipfs/boxo v0.12.0 h1:AXHg/1ONZdRQHQLgG5JHsSC3XoE4DjCAMgK+asZvUcQ= +github.com/ipfs/boxo v0.12.0/go.mod h1:xAnfiU6PtxWCnRqu7dcXQ10bB5/kvI1kXRotuGqGBhg= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= 
github.com/ipfs/go-bitswap v0.0.9/go.mod h1:kAPf5qgn2W2DrgAcscZ3HrM9qh4pH+X8Fkk3UPrwvis= github.com/ipfs/go-bitswap v0.1.0/go.mod h1:FFJEf18E9izuCqUtHxbWEvq+reg7o4CW5wSAE1wsxj0= @@ -435,8 +432,8 @@ github.com/ipfs/go-bitswap v0.12.0/go.mod h1:Iwjkd6+vaDjVIa6b6ogmZgs+b5U3EkIFEX7 github.com/ipfs/go-block-format v0.0.1/go.mod h1:DK/YYcsSUIVAFNwo/KZCdIIbpN0ROH/baNLgayt4pFc= github.com/ipfs/go-block-format v0.0.2/go.mod h1:AWR46JfpcObNfg3ok2JHDUfdiHRgWhJgCQF+KIgOPJY= github.com/ipfs/go-block-format v0.0.3/go.mod h1:4LmD4ZUw0mhO+JSKdpWwrzATiEfM7WWgQ8H5l6P8MVk= -github.com/ipfs/go-block-format v0.1.2 h1:GAjkfhVx1f4YTODS6Esrj1wt2HhrtwTnhEr+DyPUaJo= -github.com/ipfs/go-block-format v0.1.2/go.mod h1:mACVcrxarQKstUU3Yf/RdwbC4DzPV6++rO2a3d+a/KE= +github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= +github.com/ipfs/go-block-format v0.2.0/go.mod h1:+jpL11nFx5A/SPpsoBn6Bzkra/zaArfSmsknbPMYgzM= github.com/ipfs/go-blockservice v0.0.7/go.mod h1:EOfb9k/Y878ZTRY/CH0x5+ATtaipfbRhbvNSdgc/7So= github.com/ipfs/go-blockservice v0.1.0/go.mod h1:hzmMScl1kXHg3M2BjTymbVPjv627N7sYcvYaKbop39M= github.com/ipfs/go-blockservice v0.2.1/go.mod h1:k6SiwmgyYgs4M/qt+ww6amPeUH9EISLRBnvUurKJhi8= @@ -480,7 +477,8 @@ github.com/ipfs/go-fetcher v1.6.1/go.mod h1:27d/xMV8bodjVs9pugh/RCjjK2OZ68UgAMsp github.com/ipfs/go-ipfs-blockstore v0.0.1/go.mod h1:d3WClOmRQKFnJ0Jz/jj/zmksX0ma1gROTlovZKBmN08= github.com/ipfs/go-ipfs-blockstore v0.1.0/go.mod h1:5aD0AvHPi7mZc6Ci1WCAhiBQu2IsfTduLl+422H6Rqw= github.com/ipfs/go-ipfs-blockstore v0.2.1/go.mod h1:jGesd8EtCM3/zPgx+qr0/feTXGUeRai6adgwC+Q+JvE= -github.com/ipfs/go-ipfs-blockstore v1.3.0 h1:m2EXaWgwTzAfsmt5UdJ7Is6l4gJcaM/A12XwJyvYvMM= +github.com/ipfs/go-ipfs-blockstore v1.2.0 h1:n3WTeJ4LdICWs/0VSfjHrlqpPpl6MZ+ySd3j8qz0ykw= +github.com/ipfs/go-ipfs-blockstore v1.2.0/go.mod h1:eh8eTFLiINYNSNawfZOC7HOxNTxpB1PFuA5E1m/7exE= github.com/ipfs/go-ipfs-blocksutil v0.0.1 h1:Eh/H4pc1hsvhzsQoMEP3Bke/aW5P5rVM1IWFJMcGIPQ= 
github.com/ipfs/go-ipfs-blocksutil v0.0.1/go.mod h1:Yq4M86uIOmxmGPUHv/uI7uKqZNtLb449gwKqXjIsnRk= github.com/ipfs/go-ipfs-chunker v0.0.1/go.mod h1:tWewYK0we3+rMbOh7pPFGDyypCtvGcBFymgY4rSDLAw= @@ -493,6 +491,7 @@ github.com/ipfs/go-ipfs-delay v0.0.1/go.mod h1:8SP1YXK1M1kXuc4KJZINY3TQQ03J2rwBG github.com/ipfs/go-ipfs-ds-help v0.0.1/go.mod h1:gtP9xRaZXqIQRh1HRpp595KbBEdgqWFxefeVKOV8sxo= github.com/ipfs/go-ipfs-ds-help v0.1.1/go.mod h1:SbBafGJuGsPI/QL3j9Fc5YPLeAu+SzOkI0gFwAg+mOs= github.com/ipfs/go-ipfs-ds-help v1.1.0 h1:yLE2w9RAsl31LtfMt91tRZcrx+e61O5mDxFRR994w4Q= +github.com/ipfs/go-ipfs-ds-help v1.1.0/go.mod h1:YR5+6EaebOhfcqVCyqemItCLthrpVNot+rsOU/5IatU= github.com/ipfs/go-ipfs-exchange-interface v0.0.1/go.mod h1:c8MwfHjtQjPoDyiy9cFquVtVHkO9b9Ob3FG91qJnWCM= github.com/ipfs/go-ipfs-exchange-interface v0.1.0/go.mod h1:ych7WPlyHqFvCi/uQI48zLZuAWVP5iTQPXEfVaw5WEI= github.com/ipfs/go-ipfs-exchange-interface v0.2.0 h1:8lMSJmKogZYNo2jjhUs0izT+dck05pqUw4mWNW9Pw6Y= @@ -526,11 +525,13 @@ github.com/ipfs/go-ipld-cbor v0.0.6/go.mod h1:ssdxxaLJPXH7OjF5V4NSjBbcfh+evoR4uk github.com/ipfs/go-ipld-format v0.0.1/go.mod h1:kyJtbkDALmFHv3QR6et67i35QzO3S0dCDnkOJhcZkms= github.com/ipfs/go-ipld-format v0.0.2/go.mod h1:4B6+FM2u9OJ9zCV+kSbgFAZlOrv1Hqbf0INGQgiKf9k= github.com/ipfs/go-ipld-format v0.2.0/go.mod h1:3l3C1uKoadTPbeNfrDi+xMInYKlx2Cvg1BuydPSdzQs= -github.com/ipfs/go-ipld-format v0.5.0 h1:WyEle9K96MSrvr47zZHKKcDxJ/vlpET6PSiQsAFO+Ds= -github.com/ipfs/go-ipld-format v0.5.0/go.mod h1:ImdZqJQaEouMjCvqCe0ORUS+uoBmf7Hf+EO/jh+nk3M= +github.com/ipfs/go-ipld-format v0.3.0/go.mod h1:co/SdBE8h99968X0hViiw1MNlh6fvxxnHpvVLnH7jSM= +github.com/ipfs/go-ipld-format v0.6.0 h1:VEJlA2kQ3LqFSIm5Vu6eIlSxD/Ze90xtc4Meten1F5U= +github.com/ipfs/go-ipld-format v0.6.0/go.mod h1:g4QVMTn3marU3qXchwjpKPKgJv+zF+OlaKMyhJ4LHPg= github.com/ipfs/go-ipld-legacy v0.2.1 h1:mDFtrBpmU7b//LzLSypVrXsD8QxkEWxu5qVxN99/+tk= github.com/ipfs/go-ipld-legacy v0.2.1/go.mod h1:782MOUghNzMO2DER0FlBR94mllfdCJCkTtDtPM51otM= 
github.com/ipfs/go-ipns v0.3.0 h1:ai791nTgVo+zTuq2bLvEGmWP1M0A6kGTXUsgv/Yq67A= +github.com/ipfs/go-ipns v0.3.0/go.mod h1:3cLT2rbvgPZGkHJoPO1YMJeh6LtkxopCkKFcio/wE24= github.com/ipfs/go-libipfs v0.7.0 h1:Mi54WJTODaOL2/ZSm5loi3SwI3jI2OuFWUrQIkJ5cpM= github.com/ipfs/go-libipfs v0.7.0/go.mod h1:KsIf/03CqhICzyRGyGo68tooiBE2iFbI/rXW7FhAYr0= github.com/ipfs/go-log v0.0.1/go.mod h1:kL1d2/hzSpI0thNYjiKfjanbVNU+IIGA/WnNESY9leM= @@ -570,8 +571,8 @@ github.com/ipfs/interface-go-ipfs-core v0.10.0/go.mod h1:F3EcmDy53GFkF0H3iEJpfJC github.com/ipld/go-codec-dagpb v1.6.0 h1:9nYazfyu9B1p3NAgfVdpRco3Fs2nFC72DqVsMj6rOcc= github.com/ipld/go-codec-dagpb v1.6.0/go.mod h1:ANzFhfP2uMJxRBr8CE+WQWs5UsNa0pYtmKZ+agnUw9s= github.com/ipld/go-ipld-prime v0.11.0/go.mod h1:+WIAkokurHmZ/KwzDOMUuoeJgaRQktHtEaLglS3ZeV8= -github.com/ipld/go-ipld-prime v0.20.0 h1:Ud3VwE9ClxpO2LkCYP7vWPc0Fo+dYdYzgxUJZ3uRG4g= -github.com/ipld/go-ipld-prime v0.20.0/go.mod h1:PzqZ/ZR981eKbgdr3y2DJYeD/8bgMawdGVlJDE8kK+M= +github.com/ipld/go-ipld-prime v0.21.0 h1:n4JmcpOlPDIxBcY037SVfpd1G+Sj1nKZah0m6QH9C2E= +github.com/ipld/go-ipld-prime v0.21.0/go.mod h1:3RLqy//ERg/y5oShXXdx5YIp50cFGOanyMctpPjsvxQ= github.com/jackpal/gateway v1.0.5/go.mod h1:lTpwd4ACLXmpyiCTRtfiNyVnUmqT9RivzCDQetPfnjA= github.com/jackpal/go-nat-pmp v1.0.1/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus= @@ -609,9 +610,8 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= 
-github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= @@ -660,8 +660,8 @@ github.com/libp2p/go-libp2p v0.7.0/go.mod h1:hZJf8txWeCduQRDC/WSqBGMxaTHCOYHt2xS github.com/libp2p/go-libp2p v0.7.4/go.mod h1:oXsBlTLF1q7pxr+9w6lqzS1ILpyHsaBPniVO7zIHGMw= github.com/libp2p/go-libp2p v0.8.1/go.mod h1:QRNH9pwdbEBpx5DTJYg+qxcVaDMAz3Ee/qDKwXujH5o= github.com/libp2p/go-libp2p v0.14.3/go.mod h1:d12V4PdKbpL0T1/gsUNN8DfgMuRPDX8bS2QxCZlwRH0= -github.com/libp2p/go-libp2p v0.28.0 h1:zO8cY98nJiPzZpFv5w5gqqb8aVzt4ukQ0nVOSaaKhJ8= -github.com/libp2p/go-libp2p v0.28.0/go.mod h1:s3Xabc9LSwOcnv9UD4nORnXKTsWkPMkIMB/JIGXVnzk= +github.com/libp2p/go-libp2p v0.29.2 h1:uPw/c8hOxoLP/KhFnzlc5Ejqf+OmAL1dwIsqE31WBtY= +github.com/libp2p/go-libp2p v0.29.2/go.mod h1:OU7nSq0aEZMsV2wY8nXn1+XNNt9q2UiR8LjW3Kmp2UE= github.com/libp2p/go-libp2p-asn-util v0.3.0 h1:gMDcMyYiZKkocGXDQ5nsUQyquC9+H+iLEQHwOCZ7s8s= github.com/libp2p/go-libp2p-asn-util v0.3.0/go.mod h1:B1mcOrKUE35Xq/ASTmQ4tN3LNzVVaMNmq2NACuqyB9w= github.com/libp2p/go-libp2p-autonat v0.0.6/go.mod h1:uZneLdOkZHro35xIhpbtTzLlgYturpu4J5+0cZK3MqE= @@ -723,10 +723,10 @@ github.com/libp2p/go-libp2p-interface-connmgr v0.0.1/go.mod h1:GarlRLH0LdeWcLnYM github.com/libp2p/go-libp2p-interface-connmgr v0.0.4/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-connmgr v0.0.5/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-pnet v0.0.1/go.mod h1:el9jHpQAXK5dnTpKA4yfCNBZXvrzdOU75zz+C6ryp3k= 
-github.com/libp2p/go-libp2p-kad-dht v0.24.2 h1:zd7myKBKCmtZBhI3I0zm8xBkb28v3gmSEtQfBdAdFwc= -github.com/libp2p/go-libp2p-kad-dht v0.24.2/go.mod h1:BShPzRbK6+fN3hk8a0WGAYKpb8m4k+DtchkqouGTrSg= -github.com/libp2p/go-libp2p-kbucket v0.6.3 h1:p507271wWzpy2f1XxPzCQG9NiN6R6lHL9GiSErbQQo0= -github.com/libp2p/go-libp2p-kbucket v0.6.3/go.mod h1:RCseT7AH6eJWxxk2ol03xtP9pEHetYSPXOaJnOiD8i0= +github.com/libp2p/go-libp2p-kad-dht v0.23.0 h1:sxE6LxLopp79eLeV695n7+c77V/Vn4AMF28AdM/XFqM= +github.com/libp2p/go-libp2p-kad-dht v0.23.0/go.mod h1:oO5N308VT2msnQI6qi5M61wzPmJYg7Tr9e16m5n7uDU= +github.com/libp2p/go-libp2p-kbucket v0.6.0 h1:1uyqIdE6X7ihtbNg+vRc9EQEmZPEBaehvJ2W14rUrRQ= +github.com/libp2p/go-libp2p-kbucket v0.6.0/go.mod h1:efnPrfoP+WT/ONcC5eB0iADCDIJFXauXhylgJYO+VWw= github.com/libp2p/go-libp2p-loggables v0.0.1/go.mod h1:lDipDlBNYbpyqyPX/KcoO+eq0sJYEVR2JgOexcivchg= github.com/libp2p/go-libp2p-loggables v0.1.0/go.mod h1:EyumB2Y6PrYjr55Q3/tiJ/o3xoDasoRYM7nOzEpoa90= github.com/libp2p/go-libp2p-metrics v0.0.1/go.mod h1:jQJ95SXXA/K1VZi13h52WZMa9ja78zjyy5rspMsC/08= @@ -882,8 +882,8 @@ github.com/libp2p/go-yamux v1.3.7/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/h github.com/libp2p/go-yamux v1.4.0/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux v1.4.1/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux/v2 v2.2.0/go.mod h1:3So6P6TV6r75R9jiBpiIKgU/66lOarCZjqROGxzPpPQ= -github.com/libp2p/go-yamux/v4 v4.0.0 h1:+Y80dV2Yx/kv7Y7JKu0LECyVdMXm1VUoko+VQ9rBfZQ= -github.com/libp2p/go-yamux/v4 v4.0.0/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= +github.com/libp2p/go-yamux/v4 v4.0.1 h1:FfDR4S1wj6Bw2Pqbc8Uz7pCxeRBPbwsBbEdfwiCypkQ= +github.com/libp2p/go-yamux/v4 v4.0.1/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod 
h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= @@ -921,8 +921,8 @@ github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3N github.com/miekg/dns v1.1.12/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.28/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.54 h1:5jon9mWcb0sFJGpnI99tOMhCPyJ+RPVz5b63MQG0VWI= -github.com/miekg/dns v1.1.54/go.mod h1:uInx36IzPl7FYnDcMeVWxj9byh7DutNykX4G9Sj60FY= +github.com/miekg/dns v1.1.55 h1:GoQ4hpsj0nFLYe+bWiCToyrBEJXkQfOOIvFGFy0lEgo= +github.com/miekg/dns v1.1.55/go.mod h1:uInx36IzPl7FYnDcMeVWxj9byh7DutNykX4G9Sj60FY= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c h1:bzE/A84HN25pxAuk9Eej1Kz9OUelF97nAc82bDquQI8= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c/go.mod h1:0SQS9kMwD2VsyFEB++InYyBJroV/FRmBgcydeSUcJms= github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b h1:z78hV3sbSMAUoyUMM0I83AUIT6Hu17AWfgjzIbtrYFc= @@ -1035,6 +1035,7 @@ github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OS github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= @@ -1044,15 +1045,16 @@ 
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo/v2 v2.9.7 h1:06xGQy5www2oN160RtEZoTvnP2sPhEfePYmCDc2szss= -github.com/onsi/ginkgo/v2 v2.9.7/go.mod h1:cxrmXWykAwTwhQsJOPfdIDiJ+l2RYq7U8hFU+M/1uw0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= +github.com/onsi/ginkgo/v2 v2.11.0/go.mod h1:ZhrRA5XmEE3x3rhlzamx/JJvujdZoJ2uvgI7kR0iZvM= github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.27.7 h1:fVih9JD6ogIiHUN6ePK7HJidyEDpWGVB5mzM7cWNXoU= +github.com/onsi/gomega v1.27.8 h1:gegWiwZjBsf2DgiSbf5hpokZ98JVDMcWkUiigk6/KXc= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/runtime-spec v1.0.2 h1:UfAcuLBJB9Coz72x1hgl8O5RVzTdNiaglX6v2DM6FI0= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= @@ -1084,8 +1086,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod 
h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/planetscale/vtprotobuf v0.4.0 h1:NEI+g4woRaAZgeZ3sAvbtyvMBRjIv5kE7EWYQ8m4JwY= -github.com/planetscale/vtprotobuf v0.4.0/go.mod h1:wm1N3qk9G/4+VM1WhpkLbvY/d8+0PbwYYpP5P5VhTks= +github.com/planetscale/vtprotobuf v0.5.0 h1:l8PXm6Colok5z6qQLNhAj2Jq5BfoMTIHxLER5a6nDqM= +github.com/planetscale/vtprotobuf v0.5.0/go.mod h1:wm1N3qk9G/4+VM1WhpkLbvY/d8+0PbwYYpP5P5VhTks= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/polydawn/refmt v0.0.0-20190221155625-df39d6c2d992/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o= @@ -1130,12 +1132,12 @@ github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJf github.com/prometheus/procfs v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A= -github.com/quic-go/qtls-go1-19 v0.3.2 h1:tFxjCFcTQzK+oMxG6Zcvp4Dq8dx4yD3dDiIiyc86Z5U= -github.com/quic-go/qtls-go1-19 v0.3.2/go.mod h1:ySOI96ew8lnoKPtSqx2BlI5wCpUVPT05RMAlajtnyOI= -github.com/quic-go/qtls-go1-20 v0.2.2 h1:WLOPx6OY/hxtTxKV1Zrq20FtXtDEkeY00CGQm8GEa3E= -github.com/quic-go/qtls-go1-20 v0.2.2/go.mod h1:JKtK6mjbAVcUTN/9jZpvLbGxvdWIKS8uT7EiStoU1SM= -github.com/quic-go/quic-go v0.33.0 h1:ItNoTDN/Fm/zBlq769lLJc8ECe9gYaW40veHCCco7y0= -github.com/quic-go/quic-go v0.33.0/go.mod h1:YMuhaAV9/jIu0XclDXwZPAsP/2Kgr5yMYhe9oxhhOFA= +github.com/quic-go/qtls-go1-19 v0.3.3 h1:wznEHvJwd+2X3PqftRha0SUKmGsnb6dfArMhy9PeJVE= +github.com/quic-go/qtls-go1-19 v0.3.3/go.mod h1:ySOI96ew8lnoKPtSqx2BlI5wCpUVPT05RMAlajtnyOI= +github.com/quic-go/qtls-go1-20 v0.2.3 h1:m575dovXn1y2ATOb1XrRFcrv0F+EQmlowTkoraNkDPI= +github.com/quic-go/qtls-go1-20 
v0.2.3/go.mod h1:JKtK6mjbAVcUTN/9jZpvLbGxvdWIKS8uT7EiStoU1SM= +github.com/quic-go/quic-go v0.36.4 h1:CXn/ZLN5Vntlk53fjR+kUMC8Jt7flfQe+I5Ty5A+k0o= +github.com/quic-go/quic-go v0.36.4/go.mod h1:qxQumdeKw5GmWs1OsTZZnOxzSI+RJWuhf1O8FN35L2o= github.com/quic-go/webtransport-go v0.5.3 h1:5XMlzemqB4qmOlgIus5zB45AcZ2kCgCy2EptUrfOPWU= github.com/quic-go/webtransport-go v0.5.3/go.mod h1:OhmmgJIzTTqXK5xvtuX0oBpLV2GkLWNDA+UeTGJXErU= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= @@ -1193,6 +1195,8 @@ github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4k github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE= github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA= +github.com/sourcenetwork/badger/v4 v4.0.0-20230801145501-d3a57bd4c2ec h1:br39/Te7XrQmirI+QtT6YblhD9T6B2dzDNI9eoI26Pg= +github.com/sourcenetwork/badger/v4 v4.0.0-20230801145501-d3a57bd4c2ec/go.mod h1:qfCqhPoWDFJRx1gp5QwwyGo8xk1lbHUxvK9nK0OGAak= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.0-20230209220544-e16d5e34c4fc h1:ZrkklfmN27eENGFmzblEcs35PJ02hmlNgMHE8XJqFAo= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.0-20230209220544-e16d5e34c4fc/go.mod h1:3rOV6TxePSwADKpnwXBKpTjAA4QyjZBus13xc6VCtSw= github.com/sourcenetwork/graphql-go v0.7.10-0.20230511091704-fe7085512c23 h1:QcSWSYlE1alUC0uOO/trppYMLpR8OuFIL8IqR+PR5sA= @@ -1270,7 +1274,9 @@ github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXV github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU= github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM= -github.com/warpfork/go-testmark v0.11.0 
h1:J6LnV8KpceDvo7spaNU4+DauH2n1x+6RaO2rJrmpQ9U= +github.com/vito/go-sse v1.0.0 h1:e6/iTrrvy8BRrOwJwmQmlndlil+TLdxXvHi55ZDzH6M= +github.com/vito/go-sse v1.0.0/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs= +github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s= github.com/warpfork/go-wish v0.0.0-20180510122957-5ad1f5abf436/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= github.com/warpfork/go-wish v0.0.0-20190328234359-8b3e70f8e830/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= github.com/warpfork/go-wish v0.0.0-20200122115046-b9ea61034e4a/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= @@ -1315,31 +1321,30 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s= -go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4= -go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo= -go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4= -go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= -go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4= -go.opentelemetry.io/otel/sdk/metric v0.39.0 h1:Kun8i1eYf48kHH83RucG93ffz0zGV1sh46FAScOTuDI= -go.opentelemetry.io/otel/sdk/metric v0.39.0/go.mod h1:piDIRgjcK7u0HCL5pCA4e74qpK/jk3NiUoAHATVAmiI= -go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs= -go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0= +go.opentelemetry.io/otel v1.18.0 h1:TgVozPGZ01nHyDZxK5WGPFB9QexeTMXEH7+tIClWfzs= 
+go.opentelemetry.io/otel v1.18.0/go.mod h1:9lWqYO0Db579XzVuCKFNPDl4s73Voa+zEck3wHaAYQI= +go.opentelemetry.io/otel/metric v1.18.0 h1:JwVzw94UYmbx3ej++CwLUQZxEODDj/pOuTCvzhtRrSQ= +go.opentelemetry.io/otel/metric v1.18.0/go.mod h1:nNSpsVDjWGfb7chbRLUNW+PBNdcSTHD4Uu5pfFMOI0k= +go.opentelemetry.io/otel/sdk v1.17.0 h1:FLN2X66Ke/k5Sg3V623Q7h7nt3cHXaW1FOvKKrW0IpE= +go.opentelemetry.io/otel/sdk v1.17.0/go.mod h1:U87sE0f5vQB7hwUoW98pW5Rz4ZDuCFBZFNUBlSgmDFQ= +go.opentelemetry.io/otel/sdk/metric v0.40.0 h1:qOM29YaGcxipWjL5FzpyZDpCYrDREvX0mVlmXdOjCHU= +go.opentelemetry.io/otel/sdk/metric v0.40.0/go.mod h1:dWxHtdzdJvg+ciJUKLTKwrMe5P6Dv3FyDbh8UkfgkVs= +go.opentelemetry.io/otel/trace v1.18.0 h1:NY+czwbHbmndxojTEKiSMHkG2ClNH2PwmcHrdo0JY10= +go.opentelemetry.io/otel/trace v1.18.0/go.mod h1:T2+SGJGuYZY3bjj5rgh/hN7KIrlpWC5nS8Mjvzckz+0= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= -go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/dig v1.17.0 h1:5Chju+tUvcC+N7N6EV08BJz41UZuO3BmHcN4A287ZLI= go.uber.org/dig v1.17.0/go.mod h1:rTxpf7l5I0eBTlE6/9RL+lDybC7WFwY2QH55ZSjy1mU= -go.uber.org/fx v1.19.2 h1:SyFgYQFr1Wl0AYstE8vyYIzP4bFz2URrScjwC4cwUvY= -go.uber.org/fx v1.19.2/go.mod h1:43G1VcqSzbIv77y00p1DRAsyZS8WdzuYdhZXmEUkMyQ= +go.uber.org/fx v1.20.0 h1:ZMC/pnRvhsthOZh9MZjMq5U8Or3mA9zBSPaLnzs3ihQ= +go.uber.org/fx v1.20.0/go.mod h1:qCUj0btiR3/JnanEr1TYEePfSw6o/4qYJscgvzQ5Ub0= go.uber.org/goleak v1.0.0/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.10/go.mod 
h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= @@ -1354,8 +1359,8 @@ go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= -go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= -go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= +go.uber.org/zap v1.25.0 h1:4Hvk6GtkucQ790dqmj7l1eEnRdKm3k3ZUrUMS2d5+5c= +go.uber.org/zap v1.25.0/go.mod h1:JIAUzQIH94IC4fOJQm7gMmBJP5k7wQfdcnYdPoEXJYk= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1387,8 +1392,8 @@ golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.11.0 
h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= -golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1399,8 +1404,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= +golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw= +golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1426,8 +1431,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= -golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1483,8 +1488,8 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210423184538-5f58ad60dda6/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= -golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1508,8 +1513,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI= -golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1595,8 +1600,8 @@ golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1607,8 +1612,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1675,8 +1680,8 @@ golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.9.1 h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo= -golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= +golang.org/x/tools v0.11.0 h1:EMCa6U9S2LtZXLAMoWiR/R8dAQFRqbAitmbJ2UKhoi8= +golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1759,8 +1764,8 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D 
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98 h1:bVf09lpb+OJbByTj913DRJioFFAjf/ZGxEz7MajTp2U= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -1785,8 +1790,8 @@ google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= -google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc v1.58.0 h1:32JY8YpPMSR45K+c3o6b8VL73V+rR8k+DeMIr4vRH8o= +google.golang.org/grpc v1.58.0/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1817,6 +1822,7 @@ gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/src-d/go-cli.v0 v0.0.0-20181105080154-d492247bbc0d/go.mod h1:z+K8VcOYVYcSwSjGebuDL6176A1XskgbtNl64NSg+n8= gopkg.in/src-d/go-log.v1 v1.0.1/go.mod h1:GN34hKP0g305ysm2/hctJ0Y8nWP3zxXXJ8GFabTyABE= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= diff --git a/http/client.go b/http/client.go new file mode 100644 index 0000000000..16a8924a65 --- /dev/null +++ b/http/client.go @@ -0,0 +1,418 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" + + blockstore "github.com/ipfs/boxo/blockstore" + sse "github.com/vito/go-sse/sse" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" +) + +var _ client.DB = (*Client)(nil) + +// Client implements the client.DB interface over HTTP. 
+type Client struct { + http *httpClient +} + +func NewClient(rawURL string) (*Client, error) { + baseURL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + httpClient := newHttpClient(baseURL.JoinPath("/api/v0")) + return &Client{httpClient}, nil +} + +func (c *Client) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("read_only", "true") + } + + methodURL := c.http.baseURL.JoinPath("tx") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err + } + return &Transaction{txRes.ID, c.http}, nil +} + +func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("read_only", "true") + } + + methodURL := c.http.baseURL.JoinPath("tx", "concurrent") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err + } + return &Transaction{txRes.ID, c.http}, nil +} + +func (c *Client) WithTxn(tx datastore.Txn) client.Store { + client := c.http.withTxn(tx.ID()) + return &Client{client} +} + +func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := 
c.http.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var reps []client.Replicator + if err := c.http.requestJson(req, &reps); err != nil { + return nil, err + } + return reps, nil +} + +func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var cols []string + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *Client) BasicImport(ctx context.Context, filepath string) error { + methodURL := c.http.baseURL.JoinPath("backup", "import") + + body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) + if 
err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { + methodURL := c.http.baseURL.JoinPath("backup", "export") + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) + if err != nil { + return nil, err + } + var cols []client.CollectionDescription + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *Client) PatchSchema(ctx context.Context, patch string) error { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { + return c.LensRegistry().SetMigration(ctx, config) +} + +func (c *Client) LensRegistry() client.LensRegistry { + return &LensRegistry{c.http} +} + +func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } 
+ var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &Collection{c.http, description}, nil +} + +func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &Collection{c.http, description}, nil +} + +func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &Collection{c.http, description}, nil +} + +func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var descriptions []client.CollectionDescription + if err := c.http.requestJson(req, &descriptions); err != nil { + return nil, err + } + collections := make([]client.Collection, len(descriptions)) + for i, d := range descriptions { + collections[i] = &Collection{c.http, d} + } + return collections, nil +} + +func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + 
methodURL := c.http.baseURL.JoinPath("indexes") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var indexes map[client.CollectionName][]client.IndexDescription + if err := c.http.requestJson(req, &indexes); err != nil { + return nil, err + } + return indexes, nil +} + +func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestResult { + methodURL := c.http.baseURL.JoinPath("graphql") + result := &client.RequestResult{} + + body, err := json.Marshal(&GraphQLRequest{query}) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + c.http.setDefaultHeaders(req) + + res, err := c.http.client.Do(req) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + if res.Header.Get("Content-Type") == "text/event-stream" { + result.Pub = c.execRequestSubscription(ctx, res.Body) + return result + } + // ignore close errors because they have + // no perceivable effect on the end user + // and cannot be reconciled easily + defer res.Body.Close() //nolint:errcheck + + data, err := io.ReadAll(res.Body) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + var response GraphQLResponse + if err = json.Unmarshal(data, &response); err != nil { + result.GQL.Errors = []error{err} + return result + } + result.GQL.Data = response.Data + result.GQL.Errors = response.Errors + return result +} + +func (c *Client) execRequestSubscription(ctx context.Context, r io.ReadCloser) *events.Publisher[events.Update] { + pubCh := events.New[events.Update](0, 0) + pub, err := events.NewPublisher[events.Update](pubCh, 0) + if err != nil { + return nil + } + + go func() { + eventReader := sse.NewReadCloser(r) + // ignore close errors because the status + // and body of the 
request are already + // checked and it cannot be handled properly + defer eventReader.Close() //nolint:errcheck + + for { + evt, err := eventReader.Next() + if err != nil { + return + } + var response GraphQLResponse + if err := json.Unmarshal(evt.Data, &response); err != nil { + return + } + pub.Publish(client.GQLResult{ + Errors: response.Errors, + Data: response.Data, + }) + } + }() + + return pub +} + +func (c *Client) PrintDump(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("debug", "dump") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) Close(ctx context.Context) { + // do nothing +} + +func (c *Client) Root() datastore.RootStore { + panic("client side database") +} + +func (c *Client) Blockstore() blockstore.Blockstore { + panic("client side database") +} + +func (c *Client) Events() events.Events { + panic("client side database") +} + +func (c *Client) MaxTxnRetries() int { + panic("client side database") +} diff --git a/http/client_collection.go b/http/client_collection.go new file mode 100644 index 0000000000..16157a9f96 --- /dev/null +++ b/http/client_collection.go @@ -0,0 +1,419 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "net/url" + "strings" + + sse "github.com/vito/go-sse/sse" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/datastore" +) + +var _ client.Collection = (*Collection)(nil) + +// Collection implements the client.Collection interface over HTTP. +type Collection struct { + http *httpClient + desc client.CollectionDescription +} + +func (c *Collection) Description() client.CollectionDescription { + return c.desc +} + +func (c *Collection) Name() string { + return c.desc.Name +} + +func (c *Collection) Schema() client.SchemaDescription { + return c.desc.Schema +} + +func (c *Collection) ID() uint32 { + return c.desc.ID +} + +func (c *Collection) SchemaID() string { + return c.desc.Schema.SchemaID +} + +func (c *Collection) Create(ctx context.Context, doc *client.Document) error { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + // We must call this here, else the doc key on the given object will not match + // that of the document saved in the database + err := doc.RemapAliasFieldsAndDockey(c.Description().Schema.Fields) + if err != nil { + return err + } + + body, err := doc.String() + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + if err != nil { + return err + } + doc.Clean() + return nil +} + +func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + var docMapList []json.RawMessage + for _, doc := range docs { + // We must call this here, else the doc key on the given object will not match + // that of the document saved in the database + err := doc.RemapAliasFieldsAndDockey(c.Description().Schema.Fields) 
+ if err != nil { + return err + } + + docMap, err := documentJSON(doc) + if err != nil { + return err + } + docMapList = append(docMapList, docMap) + } + body, err := json.Marshal(docMapList) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + if err != nil { + return err + } + for _, doc := range docs { + doc.Clean() + } + return nil +} + +func (c *Collection) Update(ctx context.Context, doc *client.Document) error { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) + + body, err := documentJSON(doc) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + if err != nil { + return err + } + doc.Clean() + return nil +} + +func (c *Collection) Save(ctx context.Context, doc *client.Document) error { + _, err := c.Get(ctx, doc.Key(), true) + if err == nil { + return c.Update(ctx, doc) + } + if errors.Is(err, client.ErrDocumentNotFound) { + return c.Create(ctx, doc) + } + return err +} + +func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, docKey.String()) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return false, err + } + _, err = c.http.request(req) + if err != nil { + return false, err + } + return true, nil +} + +func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { + _, err := c.Get(ctx, docKey, false) + if err != nil { + return false, err + } + return true, nil +} + +func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { + switch t := target.(type) { + case string, 
map[string]any, *request.Filter: + return c.UpdateWithFilter(ctx, t, updater) + case client.DocKey: + return c.UpdateWithKey(ctx, t, updater) + case []client.DocKey: + return c.UpdateWithKeys(ctx, t, updater) + default: + return nil, client.ErrInvalidUpdateTarget + } +} + +func (c *Collection) updateWith( + ctx context.Context, + request CollectionUpdateRequest, +) (*client.UpdateResult, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + body, err := json.Marshal(request) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + var result client.UpdateResult + if err := c.http.requestJson(req, &result); err != nil { + return nil, err + } + return &result, nil +} + +func (c *Collection) UpdateWithFilter( + ctx context.Context, + filter any, + updater string, +) (*client.UpdateResult, error) { + return c.updateWith(ctx, CollectionUpdateRequest{ + Filter: filter, + Updater: updater, + }) +} + +func (c *Collection) UpdateWithKey( + ctx context.Context, + key client.DocKey, + updater string, +) (*client.UpdateResult, error) { + return c.updateWith(ctx, CollectionUpdateRequest{ + Key: key.String(), + Updater: updater, + }) +} + +func (c *Collection) UpdateWithKeys( + ctx context.Context, + docKeys []client.DocKey, + updater string, +) (*client.UpdateResult, error) { + var keys []string + for _, key := range docKeys { + keys = append(keys, key.String()) + } + return c.updateWith(ctx, CollectionUpdateRequest{ + Keys: keys, + Updater: updater, + }) +} + +func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { + switch t := target.(type) { + case string, map[string]any, *request.Filter: + return c.DeleteWithFilter(ctx, t) + case client.DocKey: + return c.DeleteWithKey(ctx, t) + case []client.DocKey: + return c.DeleteWithKeys(ctx, t) + default: + return nil, 
client.ErrInvalidDeleteTarget + } +} + +func (c *Collection) deleteWith( + ctx context.Context, + request CollectionDeleteRequest, +) (*client.DeleteResult, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + body, err := json.Marshal(request) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + var result client.DeleteResult + if err := c.http.requestJson(req, &result); err != nil { + return nil, err + } + return &result, nil +} + +func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { + return c.deleteWith(ctx, CollectionDeleteRequest{ + Filter: filter, + }) +} + +func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { + return c.deleteWith(ctx, CollectionDeleteRequest{ + Key: docKey.String(), + }) +} + +func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { + var keys []string + for _, key := range docKeys { + keys = append(keys, key.String()) + } + return c.deleteWith(ctx, CollectionDeleteRequest{ + Keys: keys, + }) +} + +func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { + query := url.Values{} + if showDeleted { + query.Add("show_deleted", "true") + } + + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, key.String()) + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var docMap map[string]any + if err := c.http.requestJson(req, &docMap); err != nil { + return nil, err + } + return client.NewDocFromMap(docMap) +} + +func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { + return &Collection{ + http: c.http.withTxn(tx.ID()), + desc: 
c.desc, + } +} + +func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + c.http.setDefaultHeaders(req) + + res, err := c.http.client.Do(req) + if err != nil { + return nil, err + } + docKeyCh := make(chan client.DocKeysResult) + + go func() { + eventReader := sse.NewReadCloser(res.Body) + // ignore close errors because the status + // and body of the request are already + // checked and it cannot be handled properly + defer eventReader.Close() //nolint:errcheck + defer close(docKeyCh) + + for { + evt, err := eventReader.Next() + if err != nil { + return + } + var res DocKeyResult + if err := json.Unmarshal(evt.Data, &res); err != nil { + return + } + key, err := client.NewDocKeyFromString(res.Key) + if err != nil { + return + } + docKey := client.DocKeysResult{ + Key: key, + } + if res.Error != "" { + docKey.Err = fmt.Errorf("%s", res.Error) + } + docKeyCh <- docKey + } + }() + + return docKeyCh, nil +} + +func (c *Collection) CreateIndex( + ctx context.Context, + indexDesc client.IndexDescription, +) (client.IndexDescription, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes") + + body, err := json.Marshal(&indexDesc) + if err != nil { + return client.IndexDescription{}, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return client.IndexDescription{}, err + } + var index client.IndexDescription + if err := c.http.requestJson(req, &index); err != nil { + return client.IndexDescription{}, err + } + return index, nil +} + +func (c *Collection) DropIndex(ctx context.Context, indexName string) error { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes", indexName) + + req, err := 
http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var indexes []client.IndexDescription + if err := c.http.requestJson(req, &indexes); err != nil { + return nil, err + } + return indexes, nil +} diff --git a/http/client_lens.go b/http/client_lens.go new file mode 100644 index 0000000000..3c8c2fc903 --- /dev/null +++ b/http/client_lens.go @@ -0,0 +1,147 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + + "github.com/sourcenetwork/immutable/enumerable" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +var _ client.LensRegistry = (*LensRegistry)(nil) + +// LensRegistry implements the client.LensRegistry interface over HTTP. 
+type LensRegistry struct { + http *httpClient +} + +func (c *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { + http := c.http.withTxn(tx.ID()) + return &LensRegistry{http} +} + +func (c *LensRegistry) SetMigration(ctx context.Context, config client.LensConfig) error { + methodURL := c.http.baseURL.JoinPath("lens") + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *LensRegistry) ReloadLenses(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("lens", "reload") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *LensRegistry) MigrateUp( + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "up") + + var data []map[string]any + err := enumerable.ForEach(src, func(item map[string]any) { + data = append(data, item) + }) + if err != nil { + return nil, err + } + body, err := json.Marshal(data) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + var result []map[string]any + if err := c.http.requestJson(req, &result); err != nil { + return nil, err + } + return enumerable.New(result), nil +} + +func (c *LensRegistry) MigrateDown( + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "down") + + var data []map[string]any + err := enumerable.ForEach(src, 
func(item map[string]any) { + data = append(data, item) + }) + if err != nil { + return nil, err + } + body, err := json.Marshal(data) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + var result []map[string]any + if err := c.http.requestJson(req, &result); err != nil { + return nil, err + } + return enumerable.New(result), nil +} + +func (c *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { + methodURL := c.http.baseURL.JoinPath("lens") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var cfgs []client.LensConfig + if err := c.http.requestJson(req, &cfgs); err != nil { + return nil, err + } + return cfgs, nil +} + +func (c *LensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return false, err + } + _, err = c.http.request(req) + if err != nil { + return false, err + } + return true, nil +} diff --git a/http/client_tx.go b/http/client_tx.go new file mode 100644 index 0000000000..8df82007a6 --- /dev/null +++ b/http/client_tx.go @@ -0,0 +1,84 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "context" + "fmt" + "net/http" + + "github.com/sourcenetwork/defradb/datastore" +) + +var _ datastore.Txn = (*Transaction)(nil) + +// Transaction implements the datastore.Txn interface over HTTP. +type Transaction struct { + id uint64 + http *httpClient +} + +func (c *Transaction) ID() uint64 { + return c.id +} + +func (c *Transaction) Commit(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Transaction) Discard(ctx context.Context) { + methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return + } + c.http.request(req) //nolint:errcheck +} + +func (c *Transaction) OnSuccess(fn func()) { + panic("client side transaction") +} + +func (c *Transaction) OnError(fn func()) { + panic("client side transaction") +} + +func (c *Transaction) OnDiscard(fn func()) { + panic("client side transaction") +} + +func (c *Transaction) Rootstore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *Transaction) Datastore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *Transaction) Headstore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *Transaction) DAGstore() datastore.DAGStore { + panic("client side transaction") +} + +func (c *Transaction) Systemstore() datastore.DSReaderWriter { + panic("client side transaction") +} diff --git a/http/errors.go b/http/errors.go new file mode 100644 index 0000000000..c2808603cf --- /dev/null +++ b/http/errors.go @@ -0,0 +1,51 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file 
licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "encoding/json" + "errors" +) + +const ( + errInvalidRequestBody = "invalid request body" + errDocKeyDoesNotMatch = "document key does not match" + errStreamingNotSupported = "streaming not supported" + errMigrationNotFound = "migration not found" + errMissingRequest = "missing request" + errInvalidTransactionId = "invalid transaction id" +) + +var ( + ErrInvalidRequestBody = errors.New(errInvalidRequestBody) + ErrDocKeyDoesNotMatch = errors.New(errDocKeyDoesNotMatch) + ErrStreamingNotSupported = errors.New(errStreamingNotSupported) + ErrMigrationNotFound = errors.New(errMigrationNotFound) + ErrMissingRequest = errors.New(errMissingRequest) + ErrInvalidTransactionId = errors.New(errInvalidTransactionId) +) + +type errorResponse struct { + Error error `json:"error"` +} + +func (e errorResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]any{"error": e.Error.Error()}) +} + +func (e *errorResponse) UnmarshalJSON(data []byte) error { + var out map[string]any + if err := json.Unmarshal(data, &out); err != nil { + return err + } + e.Error = parseError(out["error"]) + return nil +} diff --git a/http/handler_collection.go b/http/handler_collection.go new file mode 100644 index 0000000000..8f8ff8423b --- /dev/null +++ b/http/handler_collection.go @@ -0,0 +1,328 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + + "github.com/go-chi/chi/v5" + + "github.com/sourcenetwork/defradb/client" +) + +type collectionHandler struct{} + +type CollectionDeleteRequest struct { + Key string `json:"key"` + Keys []string `json:"keys"` + Filter any `json:"filter"` +} + +type CollectionUpdateRequest struct { + Key string `json:"key"` + Keys []string `json:"keys"` + Filter any `json:"filter"` + Updater string `json:"updater"` +} + +func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + var body any + if err := requestJSON(req, &body); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + switch t := body.(type) { + case []map[string]any: + var docList []*client.Document + for _, docMap := range t { + doc, err := client.NewDocFromMap(docMap) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + docList = append(docList, doc) + } + if err := col.CreateMany(req.Context(), docList); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) + case map[string]any: + doc, err := client.NewDocFromMap(t) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + if err := col.Create(req.Context(), doc); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) + default: + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + } +} + +func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + var request CollectionDeleteRequest + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + switch { + case 
request.Filter != nil: + result, err := col.DeleteWith(req.Context(), request.Filter) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + case request.Key != "": + docKey, err := client.NewDocKeyFromString(request.Key) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + result, err := col.DeleteWith(req.Context(), docKey) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + case request.Keys != nil: + var docKeys []client.DocKey + for _, key := range request.Keys { + docKey, err := client.NewDocKeyFromString(key) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + docKeys = append(docKeys, docKey) + } + result, err := col.DeleteWith(req.Context(), docKeys) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + default: + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + } +} + +func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + var request CollectionUpdateRequest + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + switch { + case request.Filter != nil: + result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + case request.Key != "": + docKey, err := client.NewDocKeyFromString(request.Key) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + result, err := col.UpdateWith(req.Context(), docKey, request.Updater) + if err != nil { + 
responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + case request.Keys != nil: + var docKeys []client.DocKey + for _, key := range request.Keys { + docKey, err := client.NewDocKeyFromString(key) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + docKeys = append(docKeys, docKey) + } + result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) + default: + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + } +} + +func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + doc, err := col.Get(req.Context(), docKey, true) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + patch, err := io.ReadAll(req.Body) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + if err := doc.SetWithJSON(patch); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err = col.Update(req.Context(), doc) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + _, err = col.Delete(req.Context(), docKey) + if err != nil { + responseJSON(rw, http.StatusBadRequest, 
errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("show_deleted")) + + docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + doc, err := col.Get(req.Context(), docKey, showDeleted) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + docMap, err := doc.ToMap() + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, docMap) +} + +type DocKeyResult struct { + Key string `json:"key"` + Error string `json:"error"` +} + +func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + flusher, ok := rw.(http.Flusher) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) + return + } + + docKeyCh, err := col.GetAllDocKeys(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + rw.Header().Set("Content-Type", "text/event-stream") + rw.Header().Set("Cache-Control", "no-cache") + rw.Header().Set("Connection", "keep-alive") + + rw.WriteHeader(http.StatusOK) + flusher.Flush() + + for docKey := range docKeyCh { + results := &DocKeyResult{ + Key: docKey.Key.String(), + } + if docKey.Err != nil { + results.Error = docKey.Err.Error() + } + data, err := json.Marshal(results) + if err != nil { + return + } + fmt.Fprintf(rw, "data: %s\n\n", data) + flusher.Flush() + } +} + +func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + var indexDesc client.IndexDescription + if 
err := requestJSON(req, &indexDesc); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + index, err := col.CreateIndex(req.Context(), indexDesc) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, index) +} + +func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + indexes, err := col.GetIndexes(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, indexes) +} + +func (s *collectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + err := col.DropIndex(req.Context(), chi.URLParam(req, "index")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} diff --git a/http/handler_lens.go b/http/handler_lens.go new file mode 100644 index 0000000000..ccf8dd01a8 --- /dev/null +++ b/http/handler_lens.go @@ -0,0 +1,107 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/sourcenetwork/immutable/enumerable" + + "github.com/sourcenetwork/defradb/client" +) + +type lensHandler struct{} + +func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + err := lens.ReloadLenses(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + var cfg client.LensConfig + if err := requestJSON(req, &cfg); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err := lens.SetMigration(req.Context(), cfg) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + var src []map[string]any + if err := requestJSON(req, &src); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + result, err := lens.MigrateUp(req.Context(), enumerable.New(src), chi.URLParam(req, "version")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) +} + +func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + var src []map[string]any + if err := requestJSON(req, &src); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + result, err := lens.MigrateDown(req.Context(), enumerable.New(src), chi.URLParam(req, "version")) + if err != nil { + responseJSON(rw, 
http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, result) +} + +func (s *lensHandler) Config(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + cfgs, err := lens.Config(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, cfgs) +} + +func (s *lensHandler) HasMigration(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) + + exists, err := lens.HasMigration(req.Context(), chi.URLParam(req, "version")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + if !exists { + responseJSON(rw, http.StatusNotFound, errorResponse{ErrMigrationNotFound}) + return + } + rw.WriteHeader(http.StatusOK) +} diff --git a/http/handler_store.go b/http/handler_store.go new file mode 100644 index 0000000000..d0cbdf42d2 --- /dev/null +++ b/http/handler_store.go @@ -0,0 +1,332 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/go-chi/chi/v5" + + "github.com/sourcenetwork/defradb/client" +) + +type storeHandler struct{} + +func (s *storeHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var rep client.Replicator + if err := requestJSON(req, &rep); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err := store.SetReplicator(req.Context(), rep) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var rep client.Replicator + if err := requestJSON(req, &rep); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err := store.DeleteReplicator(req.Context(), rep) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + reps, err := store.GetAllReplicators(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, reps) +} + +func (s *storeHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + err := store.AddP2PCollection(req.Context(), chi.URLParam(req, "id")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Request) { + store := 
req.Context().Value(storeContextKey).(client.Store) + + err := store.RemoveP2PCollection(req.Context(), chi.URLParam(req, "id")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + cols, err := store.GetAllP2PCollections(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, cols) +} + +func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var config client.BackupConfig + if err := requestJSON(req, &config); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err := store.BasicImport(req.Context(), config.Filepath) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var config client.BackupConfig + if err := requestJSON(req, &config); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err := store.BasicExport(req.Context(), &config) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + schema, err := io.ReadAll(req.Body) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + cols, err := store.AddSchema(req.Context(), string(schema)) + if err != nil { + responseJSON(rw, http.StatusBadRequest, 
errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, cols) +} + +func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + patch, err := io.ReadAll(req.Body) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + err = store.PatchSchema(req.Context(), string(patch)) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + switch { + case req.URL.Query().Has("name"): + col, err := store.GetCollectionByName(req.Context(), req.URL.Query().Get("name")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, col.Description()) + case req.URL.Query().Has("schema_id"): + col, err := store.GetCollectionBySchemaID(req.Context(), req.URL.Query().Get("schema_id")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, col.Description()) + case req.URL.Query().Has("version_id"): + col, err := store.GetCollectionByVersionID(req.Context(), req.URL.Query().Get("version_id")) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, col.Description()) + default: + cols, err := store.GetAllCollections(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + colDesc := make([]client.CollectionDescription, len(cols)) + for i, col := range cols { + colDesc[i] = col.Description() + } + responseJSON(rw, http.StatusOK, colDesc) + } +} + +func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { + store := 
req.Context().Value(storeContextKey).(client.Store) + + indexes, err := store.GetAllIndexes(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, indexes) +} + +func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + + if err := db.PrintDump(req.Context()); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + +type GraphQLRequest struct { + Query string `json:"query"` +} + +type GraphQLResponse struct { + Data any `json:"data"` + Errors []error `json:"errors,omitempty"` +} + +func (res GraphQLResponse) MarshalJSON() ([]byte, error) { + var errors []string + for _, err := range res.Errors { + errors = append(errors, err.Error()) + } + return json.Marshal(map[string]any{"data": res.Data, "errors": errors}) +} + +func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { + // decode numbers to json.Number + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.UseNumber() + + var out map[string]any + if err := dec.Decode(&out); err != nil { + return err + } + + // fix errors type to match tests + switch t := out["errors"].(type) { + case []any: + for _, v := range t { + res.Errors = append(res.Errors, parseError(v)) + } + default: + res.Errors = nil + } + + // fix data type to match tests + switch t := out["data"].(type) { + case []any: + var fixed []map[string]any + for _, v := range t { + fixed = append(fixed, v.(map[string]any)) + } + res.Data = fixed + case map[string]any: + res.Data = t + default: + res.Data = []map[string]any{} + } + + return nil +} + +func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var request GraphQLRequest + switch { + case req.URL.Query().Get("query") != "": + request.Query = req.URL.Query().Get("query") + 
case req.Body != nil: + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + default: + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrMissingRequest}) + return + } + result := store.ExecRequest(req.Context(), request.Query) + + if result.Pub == nil { + responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) + return + } + flusher, ok := rw.(http.Flusher) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) + return + } + + rw.Header().Add("Content-Type", "text/event-stream") + rw.Header().Add("Cache-Control", "no-cache") + rw.Header().Add("Connection", "keep-alive") + + rw.WriteHeader(http.StatusOK) + flusher.Flush() + + for { + select { + case <-req.Context().Done(): + return + case item, open := <-result.Pub.Stream(): + if !open { + return + } + data, err := json.Marshal(item) + if err != nil { + return + } + fmt.Fprintf(rw, "data: %s\n\n", data) + flusher.Flush() + } + } +} diff --git a/http/handler_tx.go b/http/handler_tx.go new file mode 100644 index 0000000000..b7f1c82545 --- /dev/null +++ b/http/handler_tx.go @@ -0,0 +1,95 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "net/http" + "strconv" + "sync" + + "github.com/go-chi/chi/v5" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +type txHandler struct{} + +type CreateTxResponse struct { + ID uint64 `json:"id"` +} + +func (h *txHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + txs := req.Context().Value(txsContextKey).(*sync.Map) + readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) + + tx, err := db.NewTxn(req.Context(), readOnly) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + txs.Store(tx.ID(), tx) + responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) +} + +func (h *txHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + txs := req.Context().Value(txsContextKey).(*sync.Map) + readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) + + tx, err := db.NewConcurrentTxn(req.Context(), readOnly) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + txs.Store(tx.ID(), tx) + responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) +} + +func (h *txHandler) Commit(rw http.ResponseWriter, req *http.Request) { + txs := req.Context().Value(txsContextKey).(*sync.Map) + + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) + return + } + txVal, ok := txs.Load(txId) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) + return + } + err = txVal.(datastore.Txn).Commit(req.Context()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + txs.Delete(txId) + rw.WriteHeader(http.StatusOK) +} + +func (h *txHandler) Discard(rw http.ResponseWriter, req *http.Request) { + txs 
:= req.Context().Value(txsContextKey).(*sync.Map) + + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) + return + } + txVal, ok := txs.LoadAndDelete(txId) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) + return + } + txVal.(datastore.Txn).Discard(req.Context()) + rw.WriteHeader(http.StatusOK) +} diff --git a/http/http_client.go b/http/http_client.go new file mode 100644 index 0000000000..48323607ab --- /dev/null +++ b/http/http_client.go @@ -0,0 +1,86 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" +) + +type httpClient struct { + client *http.Client + baseURL *url.URL + txValue string +} + +func newHttpClient(baseURL *url.URL) *httpClient { + client := httpClient{ + client: http.DefaultClient, + baseURL: baseURL, + } + return &client +} + +func (c *httpClient) withTxn(value uint64) *httpClient { + return &httpClient{ + client: c.client, + baseURL: c.baseURL, + txValue: fmt.Sprintf("%d", value), + } +} + +func (c *httpClient) setDefaultHeaders(req *http.Request) { + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + + if c.txValue != "" { + req.Header.Set(TX_HEADER_NAME, c.txValue) + } +} + +func (c *httpClient) request(req *http.Request) ([]byte, error) { + c.setDefaultHeaders(req) + + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + // ignore close errors because they have + // no perceivable effect on the end user + // and cannot be reconciled easily + defer res.Body.Close() //nolint:errcheck + + data, err := io.ReadAll(res.Body) + if err != nil { + return nil, err + } + // request was successful + if res.StatusCode == http.StatusOK { + return data, nil + } + // attempt to parse json error + var errRes errorResponse + if err := json.Unmarshal(data, &errRes); err != nil { + return nil, fmt.Errorf("%s", data) + } + return nil, errRes.Error +} + +func (c *httpClient) requestJson(req *http.Request, out any) error { + data, err := c.request(req) + if err != nil { + return err + } + return json.Unmarshal(data, out) +} diff --git a/http/logger.go b/http/logger.go new file mode 100644 index 0000000000..d23f65e94a --- /dev/null +++ b/http/logger.go @@ -0,0 +1,52 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + "time" + + "github.com/go-chi/chi/v5/middleware" + + "github.com/sourcenetwork/defradb/logging" +) + +var log = logging.MustNewLogger("http") + +type logEntry struct { + req *http.Request +} + +var _ middleware.LogEntry = (*logEntry)(nil) + +func (e *logEntry) Write(status, bytes int, header http.Header, elapsed time.Duration, extra any) { + log.Info( + e.req.Context(), + "Request", + logging.NewKV("Method", e.req.Method), + logging.NewKV("Path", e.req.URL.Path), + logging.NewKV("Status", status), + logging.NewKV("LengthBytes", bytes), + logging.NewKV("ElapsedTime", elapsed.String()), + ) +} + +func (e *logEntry) Panic(v any, stack []byte) { + middleware.PrintPrettyStack(v) +} + +type logFormatter struct{} + +var _ middleware.LogFormatter = (*logFormatter)(nil) + +func (f *logFormatter) NewLogEntry(req *http.Request) middleware.LogEntry { + return &logEntry{req} +} diff --git a/http/middleware.go b/http/middleware.go new file mode 100644 index 0000000000..28f1e0ff1e --- /dev/null +++ b/http/middleware.go @@ -0,0 +1,145 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "context" + "net/http" + "strconv" + "sync" + + "github.com/go-chi/chi/v5" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +const TX_HEADER_NAME = "x-defradb-tx" + +type contextKey string + +var ( + // txsContextKey is the context key for the transaction *sync.Map + txsContextKey = contextKey("txs") + // dbContextKey is the context key for the client.DB + dbContextKey = contextKey("db") + // txContextKey is the context key for the datastore.Txn + // + // This will only be set if a transaction id is specified. + txContextKey = contextKey("tx") + // storeContextKey is the context key for the client.Store + // + // If a transaction exists, all operations will be executed + // in the current transaction context. + storeContextKey = contextKey("store") + // lensContextKey is the context key for the client.LensRegistry + // + // If a transaction exists, all operations will be executed + // in the current transaction context. + lensContextKey = contextKey("lens") + // colContextKey is the context key for the client.Collection + // + // If a transaction exists, all operations will be executed + // in the current transaction context. + colContextKey = contextKey("col") +) + +// ApiMiddleware sets the required context values for all API requests. +func ApiMiddleware(db client.DB, txs *sync.Map) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + ctx = context.WithValue(ctx, dbContextKey, db) + ctx = context.WithValue(ctx, txsContextKey, txs) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) + } +} + +// TransactionMiddleware sets the transaction context for the current request. 
+func TransactionMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + txs := req.Context().Value(txsContextKey).(*sync.Map) + + txValue := req.Header.Get(TX_HEADER_NAME) + if txValue == "" { + next.ServeHTTP(rw, req) + return + } + id, err := strconv.ParseUint(txValue, 10, 64) + if err != nil { + next.ServeHTTP(rw, req) + return + } + tx, ok := txs.Load(id) + if !ok { + next.ServeHTTP(rw, req) + return + } + + ctx := context.WithValue(req.Context(), txContextKey, tx) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) +} + +// StoreMiddleware sets the db context for the current request. +func StoreMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + + var store client.Store + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + store = db.WithTxn(tx) + } else { + store = db + } + + ctx := context.WithValue(req.Context(), storeContextKey, store) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) +} + +// LensMiddleware sets the lens context for the current request. +func LensMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var lens client.LensRegistry + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + lens = store.LensRegistry().WithTxn(tx) + } else { + lens = store.LensRegistry() + } + + ctx := context.WithValue(req.Context(), lensContextKey, lens) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) +} + +// CollectionMiddleware sets the collection context for the current request. 
+func CollectionMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) + if err != nil { + rw.WriteHeader(http.StatusNotFound) + return + } + + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + ctx := context.WithValue(req.Context(), colContextKey, col) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) +} diff --git a/http/server.go b/http/server.go new file mode 100644 index 0000000000..afee4b9217 --- /dev/null +++ b/http/server.go @@ -0,0 +1,111 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "net/http" + "sync" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + + "github.com/sourcenetwork/defradb/client" +) + +type Server struct { + db client.DB + router *chi.Mux + txs *sync.Map +} + +func NewServer(db client.DB) *Server { + txs := &sync.Map{} + + tx_handler := &txHandler{} + store_handler := &storeHandler{} + collection_handler := &collectionHandler{} + lens_handler := &lensHandler{} + + router := chi.NewRouter() + router.Use(middleware.RequestLogger(&logFormatter{})) + router.Use(middleware.Recoverer) + + router.Route("/api/v0", func(api chi.Router) { + api.Use(ApiMiddleware(db, txs), TransactionMiddleware, StoreMiddleware) + api.Route("/tx", func(tx chi.Router) { + tx.Post("/", tx_handler.NewTxn) + tx.Post("/concurrent", tx_handler.NewConcurrentTxn) + tx.Post("/{id}", tx_handler.Commit) + tx.Delete("/{id}", tx_handler.Discard) + }) + api.Route("/backup", func(backup chi.Router) { + backup.Post("/export", store_handler.BasicExport) + backup.Post("/import", store_handler.BasicImport) + }) + api.Route("/schema", func(schema chi.Router) { + schema.Post("/", store_handler.AddSchema) + schema.Patch("/", store_handler.PatchSchema) + }) + api.Route("/collections", func(collections chi.Router) { + collections.Get("/", store_handler.GetCollection) + // with collection middleware + collections_tx := collections.With(CollectionMiddleware) + collections_tx.Get("/{name}", collection_handler.GetAllDocKeys) + collections_tx.Post("/{name}", collection_handler.Create) + collections_tx.Patch("/{name}", collection_handler.UpdateWith) + collections_tx.Delete("/{name}", collection_handler.DeleteWith) + collections_tx.Post("/{name}/indexes", collection_handler.CreateIndex) + collections_tx.Get("/{name}/indexes", collection_handler.GetIndexes) + collections_tx.Delete("/{name}/indexes/{index}", collection_handler.DropIndex) + collections_tx.Get("/{name}/{key}", collection_handler.Get) + 
collections_tx.Patch("/{name}/{key}", collection_handler.Update) + collections_tx.Delete("/{name}/{key}", collection_handler.Delete) + }) + api.Route("/lens", func(lens chi.Router) { + lens.Use(LensMiddleware) + lens.Get("/", lens_handler.Config) + lens.Post("/", lens_handler.SetMigration) + lens.Post("/reload", lens_handler.ReloadLenses) + lens.Get("/{version}", lens_handler.HasMigration) + lens.Post("/{version}/up", lens_handler.MigrateUp) + lens.Post("/{version}/down", lens_handler.MigrateDown) + }) + api.Route("/graphql", func(graphQL chi.Router) { + graphQL.Get("/", store_handler.ExecRequest) + graphQL.Post("/", store_handler.ExecRequest) + }) + api.Route("/p2p", func(p2p chi.Router) { + p2p.Route("/replicators", func(p2p_replicators chi.Router) { + p2p_replicators.Get("/", store_handler.GetAllReplicators) + p2p_replicators.Post("/", store_handler.SetReplicator) + p2p_replicators.Delete("/", store_handler.DeleteReplicator) + }) + p2p.Route("/collections", func(p2p_collections chi.Router) { + p2p_collections.Get("/", store_handler.GetAllP2PCollections) + p2p_collections.Post("/{id}", store_handler.AddP2PCollection) + p2p_collections.Delete("/{id}", store_handler.RemoveP2PCollection) + }) + }) + api.Route("/debug", func(debug chi.Router) { + debug.Get("/dump", store_handler.PrintDump) + }) + }) + + return &Server{ + db: db, + router: router, + txs: txs, + } +} + +func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) { + s.router.ServeHTTP(w, req) +} diff --git a/http/utils.go b/http/utils.go new file mode 100644 index 0000000000..a171e0ed38 --- /dev/null +++ b/http/utils.go @@ -0,0 +1,65 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore/badger/v4" +) + +func requestJSON(req *http.Request, out any) error { + data, err := io.ReadAll(req.Body) + if err != nil { + return err + } + return json.Unmarshal(data, out) +} + +func responseJSON(rw http.ResponseWriter, status int, out any) { + rw.Header().Add("Content-Type", "application/json") + rw.WriteHeader(status) + json.NewEncoder(rw).Encode(out) //nolint:errcheck +} + +func documentJSON(doc *client.Document) ([]byte, error) { + docMap, err := doc.ToMap() + if err != nil { + return nil, err + } + delete(docMap, "_key") + + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + if value.IsDelete() { + docMap[field.Name()] = nil + } + } + + return json.Marshal(docMap) +} + +func parseError(msg any) error { + switch msg { + case client.ErrDocumentNotFound.Error(): + return client.ErrDocumentNotFound + case badger.ErrTxnConflict.Error(): + return badger.ErrTxnConflict + default: + return fmt.Errorf("%s", msg) + } +} diff --git a/http/wrapper.go b/http/wrapper.go new file mode 100644 index 0000000000..558dc79474 --- /dev/null +++ b/http/wrapper.go @@ -0,0 +1,177 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "context" + "fmt" + "net/http/httptest" + + blockstore "github.com/ipfs/boxo/blockstore" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" +) + +var _ client.DB = (*Wrapper)(nil) + +// Wrapper combines an HTTP client and server into a +// single struct that implements the client.DB interface. +type Wrapper struct { + db client.DB + server *Server + client *Client + httpServer *httptest.Server +} + +func NewWrapper(db client.DB) (*Wrapper, error) { + server := NewServer(db) + httpServer := httptest.NewServer(server) + + client, err := NewClient(httpServer.URL) + if err != nil { + return nil, err + } + + return &Wrapper{ + db, + server, + client, + httpServer, + }, nil +} + +func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) error { + return w.client.SetReplicator(ctx, rep) +} + +func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + return w.client.DeleteReplicator(ctx, rep) +} + +func (w *Wrapper) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + return w.client.GetAllReplicators(ctx) +} + +func (w *Wrapper) AddP2PCollection(ctx context.Context, collectionID string) error { + return w.client.AddP2PCollection(ctx, collectionID) +} + +func (w *Wrapper) RemoveP2PCollection(ctx context.Context, collectionID string) error { + return w.client.RemoveP2PCollection(ctx, collectionID) +} + +func (w *Wrapper) GetAllP2PCollections(ctx context.Context) ([]string, error) { + return w.client.GetAllP2PCollections(ctx) +} + +func (w *Wrapper) BasicImport(ctx context.Context, filepath string) error { + return w.client.BasicImport(ctx, filepath) +} + +func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) error { + return w.client.BasicExport(ctx, config) +} + +func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { 
+ return w.client.AddSchema(ctx, schema) +} + +func (w *Wrapper) PatchSchema(ctx context.Context, patch string) error { + return w.client.PatchSchema(ctx, patch) +} + +func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error { + return w.client.SetMigration(ctx, config) +} + +func (w *Wrapper) LensRegistry() client.LensRegistry { + return w.client.LensRegistry() +} + +func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + return w.client.GetCollectionByName(ctx, name) +} + +func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + return w.client.GetCollectionBySchemaID(ctx, schemaId) +} + +func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + return w.client.GetCollectionByVersionID(ctx, versionId) +} + +func (w *Wrapper) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + return w.client.GetAllCollections(ctx) +} + +func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + return w.client.GetAllIndexes(ctx) +} + +func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { + return w.client.ExecRequest(ctx, query) +} + +func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + client, err := w.client.NewTxn(ctx, readOnly) + if err != nil { + return nil, err + } + server, ok := w.server.txs.Load(client.ID()) + if !ok { + return nil, fmt.Errorf("failed to get server transaction") + } + return &TxWrapper{server.(datastore.Txn), client}, nil +} + +func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + client, err := w.client.NewConcurrentTxn(ctx, readOnly) + if err != nil { + return nil, err + } + server, ok := w.server.txs.Load(client.ID()) + if !ok { + return nil, fmt.Errorf("failed to get 
server transaction") + } + return &TxWrapper{server.(datastore.Txn), client}, nil +} + +func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { + return w.client.WithTxn(tx) +} + +func (w *Wrapper) Root() datastore.RootStore { + return w.db.Root() +} + +func (w *Wrapper) Blockstore() blockstore.Blockstore { + return w.db.Blockstore() +} + +func (w *Wrapper) Close(ctx context.Context) { + w.httpServer.CloseClientConnections() + w.httpServer.Close() + w.db.Close(ctx) +} + +func (w *Wrapper) Events() events.Events { + return w.db.Events() +} + +func (w *Wrapper) MaxTxnRetries() int { + return w.db.MaxTxnRetries() +} + +func (w *Wrapper) PrintDump(ctx context.Context) error { + return w.db.PrintDump(ctx) +} diff --git a/http/wrapper_tx.go b/http/wrapper_tx.go new file mode 100644 index 0000000000..7c77b938f5 --- /dev/null +++ b/http/wrapper_tx.go @@ -0,0 +1,70 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "context" + + "github.com/sourcenetwork/defradb/datastore" +) + +var _ datastore.Txn = (*TxWrapper)(nil) + +// TxWrapper combines a client and server transaction into +// a single struct that implements the datastore.Txn interface. 
+type TxWrapper struct { + server datastore.Txn + client datastore.Txn +} + +func (w *TxWrapper) ID() uint64 { + return w.client.ID() +} + +func (w *TxWrapper) Commit(ctx context.Context) error { + return w.client.Commit(ctx) +} + +func (w *TxWrapper) Discard(ctx context.Context) { + w.client.Discard(ctx) +} + +func (w *TxWrapper) OnSuccess(fn func()) { + w.server.OnSuccess(fn) +} + +func (w *TxWrapper) OnError(fn func()) { + w.server.OnError(fn) +} + +func (w *TxWrapper) OnDiscard(fn func()) { + w.server.OnDiscard(fn) +} + +func (w *TxWrapper) Rootstore() datastore.DSReaderWriter { + return w.server.Rootstore() +} + +func (w *TxWrapper) Datastore() datastore.DSReaderWriter { + return w.server.Datastore() +} + +func (w *TxWrapper) Headstore() datastore.DSReaderWriter { + return w.server.Headstore() +} + +func (w *TxWrapper) DAGstore() datastore.DAGStore { + return w.server.DAGstore() +} + +func (w *TxWrapper) Systemstore() datastore.DSReaderWriter { + return w.server.Systemstore() +} diff --git a/lens/fetcher.go b/lens/fetcher.go index bfd8fca3bc..ee01aa7983 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -76,15 +76,25 @@ func (f *lensedFetcher) Init( f.fieldDescriptionsByName[field.Name] = field } - history, err := getTargetedSchemaHistory(ctx, txn, f.registry.Config(), f.col.Schema.SchemaID, f.col.Schema.VersionID) + cfg, err := f.registry.Config(ctx) if err != nil { return err } - f.lens = new(f.registry, f.col.Schema.VersionID, history) + + history, err := getTargetedSchemaHistory(ctx, txn, cfg, f.col.Schema.SchemaID, f.col.Schema.VersionID) + if err != nil { + return err + } + f.lens = new(ctx, f.registry, f.col.Schema.VersionID, history) f.txn = txn for schemaVersionID := range history { - if f.registry.HasMigration(schemaVersionID) { + hasMigration, err := f.registry.HasMigration(ctx, schemaVersionID) + if err != nil { + return err + } + + if hasMigration { f.hasMigrations = true break } @@ -109,13 +119,7 @@ func (f *lensedFetcher) Start(ctx 
context.Context, spans core.Spans) error { } func (f *lensedFetcher) FetchNext(ctx context.Context) (fetcher.EncodedDocument, fetcher.ExecInfo, error) { - panic("This function is never called and is dead code. As this type is internal, panicing is okay for now") -} - -func (f *lensedFetcher) FetchNextDecoded( - ctx context.Context, -) (*client.Document, fetcher.ExecInfo, error) { - doc, execInfo, err := f.source.FetchNextDecoded(ctx) + doc, execInfo, err := f.source.FetchNext(ctx) if err != nil { return nil, fetcher.ExecInfo{}, err } @@ -124,18 +128,18 @@ func (f *lensedFetcher) FetchNextDecoded( return nil, execInfo, nil } - if !f.hasMigrations || doc.SchemaVersionID == f.targetVersionID { + if !f.hasMigrations || doc.SchemaVersionID() == f.targetVersionID { // If there are no migrations registered for this schema, or if the document is already // at the target schema version, no migration is required and we can return it early. return doc, execInfo, nil } - sourceLensDoc, err := clientDocToLensDoc(doc) + sourceLensDoc, err := encodedDocToLensDoc(doc) if err != nil { return nil, fetcher.ExecInfo{}, err } - err = f.lens.Put(doc.SchemaVersionID, sourceLensDoc) + err = f.lens.Put(doc.SchemaVersionID(), sourceLensDoc) if err != nil { return nil, fetcher.ExecInfo{}, err } @@ -146,7 +150,7 @@ func (f *lensedFetcher) FetchNextDecoded( } if !hasNext { // The migration decided to not yield a document, so we cycle through the next fetcher doc - doc, nextExecInfo, err := f.FetchNextDecoded(ctx) + doc, nextExecInfo, err := f.FetchNext(ctx) execInfo.Add(nextExecInfo) return doc, execInfo, err } @@ -156,7 +160,7 @@ func (f *lensedFetcher) FetchNextDecoded( return nil, fetcher.ExecInfo{}, err } - migratedDoc, err := f.lensDocToClientDoc(migratedLensDoc) + migratedDoc, err := f.lensDocToEncodedDoc(migratedLensDoc) if err != nil { return nil, fetcher.ExecInfo{}, err } @@ -169,63 +173,6 @@ func (f *lensedFetcher) FetchNextDecoded( return migratedDoc, execInfo, nil } -func (f 
*lensedFetcher) FetchNextDoc( - ctx context.Context, - mapping *core.DocumentMapping, -) ([]byte, core.Doc, fetcher.ExecInfo, error) { - key, doc, execInfo, err := f.source.FetchNextDoc(ctx, mapping) - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - - if len(doc.Fields) == 0 { - return key, doc, execInfo, nil - } - - if doc.SchemaVersionID == f.targetVersionID { - // If the document is already at the target schema version, no migration is required and - // we can return it early. - return key, doc, execInfo, nil - } - - sourceLensDoc, err := coreDocToLensDoc(mapping, doc) - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - err = f.lens.Put(doc.SchemaVersionID, sourceLensDoc) - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - - hasNext, err := f.lens.Next() - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - if !hasNext { - // The migration decided to not yield a document, so we cycle through the next fetcher doc - key, doc, nextExecInfo, err := f.FetchNextDoc(ctx, mapping) - execInfo.Add(nextExecInfo) - return key, doc, execInfo, err - } - - migratedLensDoc, err := f.lens.Value() - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - - migratedDoc, err := f.lensDocToCoreDoc(mapping, migratedLensDoc) - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - - err = f.updateDataStore(ctx, sourceLensDoc, migratedLensDoc) - if err != nil { - return nil, core.Doc{}, fetcher.ExecInfo{}, err - } - - return key, migratedDoc, execInfo, nil -} - func (f *lensedFetcher) Close() error { if f.lens != nil { f.lens.Reset() @@ -233,14 +180,19 @@ func (f *lensedFetcher) Close() error { return f.source.Close() } -// clientDocToLensDoc converts a client.Document to a LensDoc. -func clientDocToLensDoc(doc *client.Document) (LensDoc, error) { +// encodedDocToLensDoc converts a [fetcher.EncodedDocument] to a LensDoc. 
+func encodedDocToLensDoc(doc fetcher.EncodedDocument) (LensDoc, error) { docAsMap := map[string]any{} - for field, fieldValue := range doc.Values() { - docAsMap[field.Name()] = fieldValue.Value() + properties, err := doc.Properties(false) + if err != nil { + return nil, err } - docAsMap[request.KeyFieldName] = doc.Key().String() + + for field, fieldValue := range properties { + docAsMap[field.Name] = fieldValue + } + docAsMap[request.KeyFieldName] = string(doc.Key()) // Note: client.Document does not have a means of flagging as to whether it is // deleted or not, and, currently the fetcher does not ever returned deleted items @@ -249,79 +201,23 @@ func clientDocToLensDoc(doc *client.Document) (LensDoc, error) { return docAsMap, nil } -// coreDocToLensDoc converts a core.Doc to a LensDoc. -func coreDocToLensDoc(mapping *core.DocumentMapping, doc core.Doc) (LensDoc, error) { - docAsMap := map[string]any{} - - for fieldIndex, fieldValue := range doc.Fields { - fieldName, ok := mapping.TryToFindNameFromIndex(fieldIndex) - if !ok { - continue - } - docAsMap[fieldName] = fieldValue - } - - docAsMap[request.DeletedFieldName] = doc.Status.IsDeleted() - - return docAsMap, nil -} - -// lensDocToCoreDoc converts a LensDoc to a core.Doc. 
-func (f *lensedFetcher) lensDocToCoreDoc(mapping *core.DocumentMapping, docAsMap LensDoc) (core.Doc, error) { - doc := mapping.NewDoc() +func (f *lensedFetcher) lensDocToEncodedDoc(docAsMap LensDoc) (fetcher.EncodedDocument, error) { + var key string + status := client.Active + properties := map[client.FieldDescription]any{} for fieldName, fieldByteValue := range docAsMap { if fieldName == request.KeyFieldName { - key, ok := fieldByteValue.(string) - if !ok { - return core.Doc{}, core.ErrInvalidKey - } - - doc.SetKey(key) + key = fieldByteValue.(string) continue } - fieldDesc, fieldFound := f.fieldDescriptionsByName[fieldName] - if !fieldFound { - // Note: This can technically happen if a Lens migration returns a field that - // we do not know about. In which case we have to skip it. - continue - } - - fieldValue, err := core.DecodeFieldValue(fieldDesc, fieldByteValue) - if err != nil { - return core.Doc{}, err - } - - index := mapping.FirstIndexOfName(fieldName) - doc.Fields[index] = fieldValue - } - - if value, ok := docAsMap[request.DeletedFieldName]; ok { - if wasDeleted, ok := value.(bool); ok { - if wasDeleted { - doc.Status = client.Deleted - } else { - doc.Status = client.Active + if fieldName == request.DeletedFieldName { + if wasDeleted, ok := fieldByteValue.(bool); ok { + if wasDeleted { + status = client.Deleted + } } - } - } - - doc.SchemaVersionID = f.col.Schema.VersionID - - return doc, nil -} - -// lensDocToClientDoc converts a LensDoc to a client.Document. 
-func (f *lensedFetcher) lensDocToClientDoc(docAsMap LensDoc) (*client.Document, error) { - key, err := client.NewDocKeyFromString(docAsMap[request.KeyFieldName].(string)) - if err != nil { - return nil, err - } - doc := client.NewDocWithKey(key) - - for fieldName, fieldByteValue := range docAsMap { - if fieldName == request.KeyFieldName { continue } @@ -337,19 +233,15 @@ func (f *lensedFetcher) lensDocToClientDoc(docAsMap LensDoc) (*client.Document, return nil, err } - err = doc.SetAs(fieldDesc.Name, fieldValue, fieldDesc.Typ) - if err != nil { - return nil, err - } + properties[fieldDesc] = fieldValue } - doc.SchemaVersionID = f.col.Schema.VersionID - - // Note: client.Document does not have a means of flagging as to whether it is - // deleted or not, and, currently the fetcher does not ever returned deleted items - // from the function that returs this type. - - return doc, nil + return &lensEncodedDocument{ + key: []byte(key), + schemaVersionID: f.col.Schema.VersionID, + status: status, + properties: properties, + }, nil } // updateDataStore updates the datastore with the migrated values. 
@@ -424,3 +316,35 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string return nil } + +type lensEncodedDocument struct { + key []byte + schemaVersionID string + status client.DocumentStatus + properties map[client.FieldDescription]any +} + +var _ fetcher.EncodedDocument = (*lensEncodedDocument)(nil) + +func (encdoc *lensEncodedDocument) Key() []byte { + return encdoc.key +} + +func (encdoc *lensEncodedDocument) SchemaVersionID() string { + return encdoc.schemaVersionID +} + +func (encdoc *lensEncodedDocument) Status() client.DocumentStatus { + return encdoc.status +} + +func (encdoc *lensEncodedDocument) Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) { + return encdoc.properties, nil +} + +func (encdoc *lensEncodedDocument) Reset() { + encdoc.key = nil + encdoc.schemaVersionID = "" + encdoc.status = 0 + encdoc.properties = map[client.FieldDescription]any{} +} diff --git a/lens/lens.go b/lens/lens.go index 50549542b8..86fcb0876f 100644 --- a/lens/lens.go +++ b/lens/lens.go @@ -11,6 +11,8 @@ package lens import ( + "context" + "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" @@ -42,6 +44,8 @@ type Lens interface { type lens struct { lensRegistry client.LensRegistry + ctx context.Context + // The primary access points to the lens pipes through which all things flow. lensPipesBySchemaVersionIDs map[schemaVersionID]enumerable.Concatenation[LensDoc] @@ -49,7 +53,8 @@ type lens struct { lensInputPipesBySchemaVersionIDs map[schemaVersionID]enumerable.Queue[LensDoc] // The output pipe, through which all outputs must exit. 
- outputPipe enumerable.Concatenation[LensDoc] + outputPipe enumerable.Concatenation[LensDoc] + unknownVersionPipe enumerable.Queue[LensDoc] schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink @@ -59,6 +64,7 @@ type lens struct { var _ Lens = (*lens)(nil) func new( + ctx context.Context, lensRegistry client.LensRegistry, targetSchemaVersionID schemaVersionID, schemaVersionHistory map[schemaVersionID]*targetedSchemaHistoryLink, @@ -68,8 +74,10 @@ func new( return &lens{ lensRegistry: lensRegistry, + ctx: ctx, source: enumerable.NewQueue[lensInput](), outputPipe: outputPipe, + unknownVersionPipe: targetSource, schemaVersionHistory: schemaVersionHistory, lensInputPipesBySchemaVersionIDs: map[schemaVersionID]enumerable.Queue[LensDoc]{ targetSchemaVersionID: targetSource, @@ -128,7 +136,18 @@ func (l *lens) Next() (bool, error) { // up to the output via any intermediary pipes. inputPipe = p } else { - historyLocation := l.schemaVersionHistory[doc.SchemaVersionID] + historyLocation, ok := l.schemaVersionHistory[doc.SchemaVersionID] + if !ok { + // We may recieve documents of unknown schema versions, they should + // still be fed through the pipe system in order to preserve order. + err = l.unknownVersionPipe.Put(doc.Doc) + if err != nil { + return false, err + } + + return l.outputPipe.Next() + } + var pipeHead enumerable.Enumerable[LensDoc] for { @@ -162,25 +181,21 @@ func (l *lens) Next() (bool, error) { // Aquire a lens migration from the registery, using the junctionPipe as its source. // The new pipeHead will then be connected as a source to the next migration-stage on // the next loop. 
- pipeHead, err = l.lensRegistry.MigrateUp(junctionPipe, historyLocation.schemaVersionID) + pipeHead, err = l.lensRegistry.MigrateUp(l.ctx, junctionPipe, historyLocation.schemaVersionID) if err != nil { return false, err } historyLocation = historyLocation.next.Value() } else { - // The pipe head then becomes the schema version migration to the next version - // sourcing from any documents at schemaVersionID, or lower schema versions. - // This also ensures each document only passes through each migration once, - // in order, and through the same state container (in case migrations use state). - pipeHead, err = l.lensRegistry.MigrateDown(junctionPipe, historyLocation.schemaVersionID) + // Aquire a lens migration from the registery, using the junctionPipe as its source. + // The new pipeHead will then be connected as a source to the next migration-stage on + // the next loop. + pipeHead, err = l.lensRegistry.MigrateDown(l.ctx, junctionPipe, historyLocation.previous.Value().schemaVersionID) if err != nil { return false, err } - // Aquire a lens migration from the registery, using the junctionPipe as its source. - // The new pipeHead will then be connected as a source to the next migration-stage on - // the next loop. 
historyLocation = historyLocation.previous.Value() } } diff --git a/lens/registry.go b/lens/registry.go index aee26104ef..a4074ca7f4 100644 --- a/lens/registry.go +++ b/lens/registry.go @@ -17,6 +17,7 @@ import ( "github.com/ipfs/go-datastore/query" "github.com/lens-vm/lens/host-go/config" + "github.com/lens-vm/lens/host-go/config/model" "github.com/lens-vm/lens/host-go/engine/module" "github.com/lens-vm/lens/host-go/runtimes/wazero" "github.com/sourcenetwork/immutable" @@ -45,13 +46,45 @@ type lensRegistry struct { modulesByPath map[string]module.Module moduleLock sync.Mutex - lensPoolsBySchemaVersionID map[string]*lensPool + lensPoolsBySchemaVersionID map[string]*lensPool + reversedPoolsBySchemaVersionID map[string]*lensPool + poolLock sync.RWMutex // lens configurations by source schema version ID - configs map[string]client.LensConfig + configs map[string]client.LensConfig + configLock sync.RWMutex + + // Writable transaction contexts by transaction ID. + // + // Read-only transaction contexts are not tracked. + txnCtxs map[uint64]*txnContext + txnLock sync.RWMutex } -var _ client.LensRegistry = (*lensRegistry)(nil) +// txnContext contains uncommitted transaction state tracked by the registry, +// stuff within here should be accessible from within this transaction but not +// from outside. +type txnContext struct { + txn datastore.Txn + lensPoolsBySchemaVersionID map[string]*lensPool + reversedPoolsBySchemaVersionID map[string]*lensPool + configs map[string]client.LensConfig +} + +func newTxnCtx(txn datastore.Txn) *txnContext { + return &txnContext{ + txn: txn, + lensPoolsBySchemaVersionID: map[string]*lensPool{}, + reversedPoolsBySchemaVersionID: map[string]*lensPool{}, + configs: map[string]client.LensConfig{}, + } +} + +// TxnSource represents an object capable of constructing the transactions that +// implicit-transaction registries need internally. 
+type TxnSource interface { + NewTxn(context.Context, bool) (datastore.Txn, error) +} // DefaultPoolSize is the default size of the lens pool for each schema version. const DefaultPoolSize int = 5 @@ -59,7 +92,7 @@ const DefaultPoolSize int = 5 // NewRegistry instantiates a new registery. // // It will be of size 5 (per schema version) if a size is not provided. -func NewRegistry(lensPoolSize immutable.Option[int]) *lensRegistry { +func NewRegistry(lensPoolSize immutable.Option[int], db TxnSource) client.LensRegistry { var size int if lensPoolSize.HasValue() { size = lensPoolSize.Value() @@ -67,16 +100,76 @@ func NewRegistry(lensPoolSize immutable.Option[int]) *lensRegistry { size = DefaultPoolSize } - return &lensRegistry{ - poolSize: size, - runtime: wazero.New(), - modulesByPath: map[string]module.Module{}, - lensPoolsBySchemaVersionID: map[string]*lensPool{}, - configs: map[string]client.LensConfig{}, + return &implicitTxnLensRegistry{ + db: db, + registry: &lensRegistry{ + poolSize: size, + runtime: wazero.New(), + modulesByPath: map[string]module.Module{}, + lensPoolsBySchemaVersionID: map[string]*lensPool{}, + reversedPoolsBySchemaVersionID: map[string]*lensPool{}, + configs: map[string]client.LensConfig{}, + txnCtxs: map[uint64]*txnContext{}, + }, } } -func (r *lensRegistry) SetMigration(ctx context.Context, txn datastore.Txn, cfg client.LensConfig) error { +func (r *lensRegistry) getCtx(txn datastore.Txn, readonly bool) *txnContext { + r.txnLock.RLock() + if txnCtx, ok := r.txnCtxs[txn.ID()]; ok { + r.txnLock.RUnlock() + return txnCtx + } + r.txnLock.RUnlock() + + txnCtx := newTxnCtx(txn) + if readonly { + return txnCtx + } + + r.txnLock.Lock() + r.txnCtxs[txn.ID()] = txnCtx + r.txnLock.Unlock() + + txnCtx.txn.OnSuccess(func() { + r.poolLock.Lock() + for schemaVersionID, locker := range txnCtx.lensPoolsBySchemaVersionID { + r.lensPoolsBySchemaVersionID[schemaVersionID] = locker + } + for schemaVersionID, locker := range 
txnCtx.reversedPoolsBySchemaVersionID { + r.reversedPoolsBySchemaVersionID[schemaVersionID] = locker + } + r.poolLock.Unlock() + + r.configLock.Lock() + for schemaVersionID, cfg := range txnCtx.configs { + r.configs[schemaVersionID] = cfg + } + r.configLock.Unlock() + + r.txnLock.Lock() + delete(r.txnCtxs, txn.ID()) + r.txnLock.Unlock() + }) + + txn.OnError(func() { + r.txnLock.Lock() + delete(r.txnCtxs, txn.ID()) + r.txnLock.Unlock() + }) + + txn.OnDiscard(func() { + // Delete it to help reduce the build up of memory, the txnCtx will be re-contructed if the + // txn is reused after discard. + r.txnLock.Lock() + delete(r.txnCtxs, txn.ID()) + r.txnLock.Unlock() + }) + + return txnCtx +} + +func (r *lensRegistry) setMigration(ctx context.Context, txnCtx *txnContext, cfg client.LensConfig) error { key := core.NewSchemaVersionMigrationKey(cfg.SourceSchemaVersionID) json, err := json.Marshal(cfg) @@ -84,12 +177,12 @@ func (r *lensRegistry) SetMigration(ctx context.Context, txn datastore.Txn, cfg return err } - err = txn.Systemstore().Put(ctx, key.ToDS(), json) + err = txnCtx.txn.Systemstore().Put(ctx, key.ToDS(), json) if err != nil { return err } - err = r.cacheLens(txn, cfg) + err = r.cacheLens(txnCtx, cfg) if err != nil { return err } @@ -97,51 +190,64 @@ func (r *lensRegistry) SetMigration(ctx context.Context, txn datastore.Txn, cfg return nil } -func (r *lensRegistry) cacheLens(txn datastore.Txn, cfg client.LensConfig) error { - locker, lockerAlreadyExists := r.lensPoolsBySchemaVersionID[cfg.SourceSchemaVersionID] - if !lockerAlreadyExists { - locker = r.newPool(r.poolSize, cfg) +func (r *lensRegistry) cacheLens(txnCtx *txnContext, cfg client.LensConfig) error { + inversedModuleCfgs := make([]model.LensModule, len(cfg.Lenses)) + for i, moduleCfg := range cfg.Lenses { + // Reverse the order of the lenses for the inverse migration. + inversedModuleCfgs[len(cfg.Lenses)-i-1] = model.LensModule{ + Path: moduleCfg.Path, + // Reverse the direction of the lens. 
+ // This needs to be done on a clone of the original cfg or we may end up mutating + // the original. + Inverse: !moduleCfg.Inverse, + Arguments: moduleCfg.Arguments, + } } - newLensPipes := make([]*lensPipe, r.poolSize) - for i := 0; i < r.poolSize; i++ { - var err error - newLensPipes[i], err = r.newLensPipe(cfg) - if err != nil { - return err - } + reversedCfg := client.LensConfig{ + SourceSchemaVersionID: cfg.SourceSchemaVersionID, + DestinationSchemaVersionID: cfg.DestinationSchemaVersionID, + Lens: model.Lens{ + Lenses: inversedModuleCfgs, + }, } - // todo - handling txns like this means that the migrations are not available within the current - // transaction if used for stuff (e.g. GQL requests) before commit. - // https://github.com/sourcenetwork/defradb/issues/1592 - txn.OnSuccess(func() { - if !lockerAlreadyExists { - r.lensPoolsBySchemaVersionID[cfg.SourceSchemaVersionID] = locker - } + err := r.cachePool(txnCtx.txn, txnCtx.lensPoolsBySchemaVersionID, cfg) + if err != nil { + return err + } + err = r.cachePool(txnCtx.txn, txnCtx.reversedPoolsBySchemaVersionID, reversedCfg) + // For now, checking this error is the best way of determining if a migration has an inverse. + // Inverses are optional. 
+ //nolint:revive + if err != nil && !errors.Is(errors.New("Export `inverse` does not exist"), err) { + return err + } - drainLoop: - for { - select { - case <-locker.pipes: - default: - break drainLoop - } - } + txnCtx.configs[cfg.SourceSchemaVersionID] = cfg - for _, lensPipe := range newLensPipes { - locker.returnLens(lensPipe) + return nil +} + +func (r *lensRegistry) cachePool(txn datastore.Txn, target map[string]*lensPool, cfg client.LensConfig) error { + pool := r.newPool(r.poolSize, cfg) + + for i := 0; i < r.poolSize; i++ { + lensPipe, err := r.newLensPipe(cfg) + if err != nil { + return err } + pool.returnLens(lensPipe) + } - r.configs[cfg.SourceSchemaVersionID] = cfg - }) + target[cfg.SourceSchemaVersionID] = pool return nil } -func (r *lensRegistry) ReloadLenses(ctx context.Context, txn datastore.Txn) error { +func (r *lensRegistry) reloadLenses(ctx context.Context, txnCtx *txnContext) error { prefix := core.NewSchemaVersionMigrationKey("") - q, err := txn.Systemstore().Query(ctx, query.Query{ + q, err := txnCtx.txn.Systemstore().Query(ctx, query.Query{ Prefix: prefix.ToString(), }) if err != nil { @@ -181,7 +287,7 @@ func (r *lensRegistry) ReloadLenses(ctx context.Context, txn datastore.Txn) erro return err } - err = r.cacheLens(txn, cfg) + err = r.cacheLens(txnCtx, cfg) if err != nil { err = q.Close() if err != nil { @@ -199,11 +305,29 @@ func (r *lensRegistry) ReloadLenses(ctx context.Context, txn datastore.Txn) erro return nil } -func (r *lensRegistry) MigrateUp( +func (r *lensRegistry) migrateUp( + txnCtx *txnContext, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) (enumerable.Enumerable[LensDoc], error) { + return r.migrate(r.lensPoolsBySchemaVersionID, txnCtx.lensPoolsBySchemaVersionID, src, schemaVersionID) +} + +func (r *lensRegistry) migrateDown( + txnCtx *txnContext, src enumerable.Enumerable[LensDoc], schemaVersionID string, ) (enumerable.Enumerable[LensDoc], error) { - lensPool, ok := 
r.lensPoolsBySchemaVersionID[schemaVersionID] + return r.migrate(r.reversedPoolsBySchemaVersionID, txnCtx.reversedPoolsBySchemaVersionID, src, schemaVersionID) +} + +func (r *lensRegistry) migrate( + pools map[string]*lensPool, + txnPools map[string]*lensPool, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) (enumerable.Enumerable[LensDoc], error) { + lensPool, ok := r.getPool(pools, txnPools, schemaVersionID) if !ok { // If there are no migrations for this schema version, just return the given source. return src, nil @@ -219,27 +343,48 @@ func (r *lensRegistry) MigrateUp( return lens, nil } -func (*lensRegistry) MigrateDown( - src enumerable.Enumerable[LensDoc], - schemaVersionID string, -) (enumerable.Enumerable[LensDoc], error) { - // todo: https://github.com/sourcenetwork/defradb/issues/1591 - return src, nil -} +func (r *lensRegistry) config(txnCtx *txnContext) []client.LensConfig { + configs := map[string]client.LensConfig{} + r.configLock.RLock() + for schemaVersionID, cfg := range r.configs { + configs[schemaVersionID] = cfg + } + r.configLock.RUnlock() + + // If within a txn actively writing to this registry overwrite + // values from the (commited) registry. + // Note: Config cannot be removed, only replaced at the moment. 
+ for schemaVersionID, cfg := range txnCtx.configs { + configs[schemaVersionID] = cfg + } -func (r *lensRegistry) Config() []client.LensConfig { result := []client.LensConfig{} - for _, cfg := range r.configs { + for _, cfg := range configs { result = append(result, cfg) } return result } -func (r *lensRegistry) HasMigration(schemaVersionID string) bool { - _, hasMigration := r.lensPoolsBySchemaVersionID[schemaVersionID] +func (r *lensRegistry) hasMigration(txnCtx *txnContext, schemaVersionID string) bool { + _, hasMigration := r.getPool(r.lensPoolsBySchemaVersionID, txnCtx.lensPoolsBySchemaVersionID, schemaVersionID) return hasMigration } +func (r *lensRegistry) getPool( + pools map[string]*lensPool, + txnPools map[string]*lensPool, + schemaVersionID string, +) (*lensPool, bool) { + if pool, ok := txnPools[schemaVersionID]; ok { + return pool, true + } + + r.poolLock.RLock() + pool, ok := pools[schemaVersionID] + r.poolLock.RUnlock() + return pool, ok +} + // lensPool provides a pool-like mechanic for caching a limited number of wasm lens modules in // a thread safe fashion. // diff --git a/lens/txn_registry.go b/lens/txn_registry.go new file mode 100644 index 0000000000..954db01e0c --- /dev/null +++ b/lens/txn_registry.go @@ -0,0 +1,163 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package lens + +import ( + "context" + + "github.com/sourcenetwork/immutable/enumerable" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +type implicitTxnLensRegistry struct { + registry *lensRegistry + db TxnSource +} + +type explicitTxnLensRegistry struct { + registry *lensRegistry + txn datastore.Txn +} + +var _ client.LensRegistry = (*implicitTxnLensRegistry)(nil) +var _ client.LensRegistry = (*explicitTxnLensRegistry)(nil) + +func (r *implicitTxnLensRegistry) WithTxn(txn datastore.Txn) client.LensRegistry { + return &explicitTxnLensRegistry{ + registry: r.registry, + txn: txn, + } +} + +func (r *explicitTxnLensRegistry) WithTxn(txn datastore.Txn) client.LensRegistry { + return &explicitTxnLensRegistry{ + registry: r.registry, + txn: txn, + } +} + +func (r *implicitTxnLensRegistry) SetMigration(ctx context.Context, cfg client.LensConfig) error { + txn, err := r.db.NewTxn(ctx, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + txnCtx := r.registry.getCtx(txn, false) + + err = r.registry.setMigration(ctx, txnCtx, cfg) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (r *explicitTxnLensRegistry) SetMigration(ctx context.Context, cfg client.LensConfig) error { + return r.registry.setMigration(ctx, r.registry.getCtx(r.txn, false), cfg) +} + +func (r *implicitTxnLensRegistry) ReloadLenses(ctx context.Context) error { + txn, err := r.db.NewTxn(ctx, true) + if err != nil { + return err + } + defer txn.Discard(ctx) + txnCtx := r.registry.getCtx(txn, false) + + err = r.registry.reloadLenses(ctx, txnCtx) + if err != nil { + return err + } + + return txn.Commit(ctx) +} + +func (r *explicitTxnLensRegistry) ReloadLenses(ctx context.Context) error { + return r.registry.reloadLenses(ctx, r.registry.getCtx(r.txn, true)) +} + +func (r *implicitTxnLensRegistry) MigrateUp( + ctx context.Context, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) 
(enumerable.Enumerable[map[string]any], error) { + txn, err := r.db.NewTxn(ctx, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + txnCtx := newTxnCtx(txn) + + return r.registry.migrateUp(txnCtx, src, schemaVersionID) +} + +func (r *explicitTxnLensRegistry) MigrateUp( + ctx context.Context, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + return r.registry.migrateUp(r.registry.getCtx(r.txn, true), src, schemaVersionID) +} + +func (r *implicitTxnLensRegistry) MigrateDown( + ctx context.Context, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + txn, err := r.db.NewTxn(ctx, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + txnCtx := newTxnCtx(txn) + + return r.registry.migrateDown(txnCtx, src, schemaVersionID) +} + +func (r *explicitTxnLensRegistry) MigrateDown( + ctx context.Context, + src enumerable.Enumerable[LensDoc], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + return r.registry.migrateDown(r.registry.getCtx(r.txn, true), src, schemaVersionID) +} + +func (r *implicitTxnLensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { + txn, err := r.db.NewTxn(ctx, true) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + txnCtx := newTxnCtx(txn) + + return r.registry.config(txnCtx), nil +} + +func (r *explicitTxnLensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { + return r.registry.config(r.registry.getCtx(r.txn, true)), nil +} + +func (r *implicitTxnLensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { + txn, err := r.db.NewTxn(ctx, true) + if err != nil { + return false, err + } + defer txn.Discard(ctx) + txnCtx := newTxnCtx(txn) + + return r.registry.hasMigration(txnCtx, schemaVersionID), nil +} + +func (r *explicitTxnLensRegistry) HasMigration(ctx context.Context, 
schemaVersionID string) (bool, error) { + return r.registry.hasMigration(r.registry.getCtx(r.txn, true), schemaVersionID), nil +} diff --git a/licenses/BSL.txt b/licenses/BSL.txt index 093935cc02..1847b1aee1 100644 --- a/licenses/BSL.txt +++ b/licenses/BSL.txt @@ -7,7 +7,7 @@ Parameters Licensor: Democratized Data (D2) Foundation -Licensed Work: DefraDB v0.6.0 +Licensed Work: DefraDB v0.7.0 The Licensed Work is (c) 2023 D2 Foundation. @@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the -Change Date: 2027-07-31 +Change Date: 2027-09-18 Change License: Apache License, Version 2.0 diff --git a/logging/logger.go b/logging/logger.go index 9b9bb20e35..f93e305fce 100644 --- a/logging/logger.go +++ b/logging/logger.go @@ -290,7 +290,7 @@ func (l *goLogger) ApplyConfig(config Config) { } // goLoggerV2 is a wrapper for a go-log V2 logger -// Used by github.com/sourcenetwork/defradb/datastore/badger/v3 +// Used by github.com/sourcenetwork/defradb/datastore/badger/v4 type goLoggerV2 struct { *logger *gologV2.ZapEventLogger diff --git a/merkle/clock/clock.go b/merkle/clock/clock.go index f9debd42c9..95151e2e76 100644 --- a/merkle/clock/clock.go +++ b/merkle/clock/clock.go @@ -16,7 +16,6 @@ package clock import ( "context" - dshelp "github.com/ipfs/boxo/datastore/dshelp" cid "github.com/ipfs/go-cid" ipld "github.com/ipfs/go-ipld-format" @@ -56,13 +55,8 @@ func NewMerkleClock( func (mc *MerkleClock) putBlock( ctx context.Context, heads []cid.Cid, - height uint64, delta core.Delta, ) (ipld.Node, error) { - if delta != nil { - delta.SetPriority(height) - } - node, err := makeNode(delta, heads) if err != nil { return nil, NewErrCreatingBlock(err) @@ -103,7 +97,7 @@ func (mc *MerkleClock) AddDAGNode( delta.SetPriority(height) // write the delta and heads to a new block - nd, err := mc.putBlock(ctx, heads, height, delta) + nd, err := mc.putBlock(ctx, heads, delta) if err != nil { return nil, err } @@ -113,8 +107,6 @@ func (mc *MerkleClock) AddDAGNode( _, 
err = mc.ProcessNode( ctx, &CrdtNodeGetter{DeltaExtractor: mc.crdt.DeltaDecode}, - nd.Cid(), - height, delta, nd, ) @@ -126,16 +118,16 @@ func (mc *MerkleClock) AddDAGNode( func (mc *MerkleClock) ProcessNode( ctx context.Context, ng core.NodeGetter, - root cid.Cid, - rootPrio uint64, delta core.Delta, node ipld.Node, ) ([]cid.Cid, error) { - current := node.Cid() - log.Debug(ctx, "Running ProcessNode", logging.NewKV("CID", current)) - err := mc.crdt.Merge(ctx, delta, dshelp.MultihashToDsKey(current.Hash()).String()) + nodeCid := node.Cid() + priority := delta.GetPriority() + + log.Debug(ctx, "Running ProcessNode", logging.NewKV("CID", nodeCid)) + err := mc.crdt.Merge(ctx, delta) if err != nil { - return nil, NewErrMergingDelta(current, err) + return nil, NewErrMergingDelta(nodeCid, err) } links := node.Links() @@ -151,9 +143,9 @@ func (mc *MerkleClock) ProcessNode( } if !hasHeads { // reached the bottom, at a leaf log.Debug(ctx, "No heads found") - err := mc.headset.Write(ctx, root, rootPrio) + err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { - return nil, NewErrAddingHead(root, err) + return nil, NewErrAddingHead(nodeCid, err) } } @@ -171,9 +163,9 @@ func (mc *MerkleClock) ProcessNode( log.Debug(ctx, "Found head, replacing!") // reached one of the current heads, replace it with the tip // of current branch - err = mc.headset.Replace(ctx, linkCid, root, rootPrio) + err = mc.headset.Replace(ctx, linkCid, nodeCid, priority) if err != nil { - return nil, NewErrReplacingHead(linkCid, root, err) + return nil, NewErrReplacingHead(linkCid, nodeCid, err) } continue @@ -187,13 +179,13 @@ func (mc *MerkleClock) ProcessNode( // we reached a non-head node in the known tree. 
// This means our root block is a new head log.Debug(ctx, "Adding head") - err := mc.headset.Write(ctx, root, rootPrio) + err := mc.headset.Write(ctx, nodeCid, priority) if err != nil { log.ErrorE( ctx, "Failure adding head (when root is a new head)", err, - logging.NewKV("Root", root), + logging.NewKV("Root", nodeCid), ) // OR should this also return like below comment?? // return nil, errors.Wrap("error adding head (when root is new head): %s ", root, err) diff --git a/merkle/clock/clock_test.go b/merkle/clock/clock_test.go index 8cee13a2bb..bdf8dcaeec 100644 --- a/merkle/clock/clock_test.go +++ b/merkle/clock/clock_test.go @@ -58,7 +58,7 @@ func TestMerkleClockPutBlock(t *testing.T) { delta := &crdt.LWWRegDelta{ Data: []byte("test"), } - node, err := clk.putBlock(ctx, nil, 0, delta) + node, err := clk.putBlock(ctx, nil, delta) if err != nil { t.Errorf("Failed to putBlock, err: %v", err) } @@ -85,7 +85,7 @@ func TestMerkleClockPutBlockWithHeads(t *testing.T) { return } heads := []cid.Cid{c} - node, err := clk.putBlock(ctx, heads, 0, delta) + node, err := clk.putBlock(ctx, heads, delta) if err != nil { t.Error("Failed to putBlock with heads:", err) return diff --git a/merkle/crdt/composite.go b/merkle/crdt/composite.go index 7bc2d6b149..d851eb65bb 100644 --- a/merkle/crdt/composite.go +++ b/merkle/crdt/composite.go @@ -133,6 +133,6 @@ func (m *MerkleCompositeDAG) Value(ctx context.Context) ([]byte, error) { // Merge writes the provided delta to state using a supplied merge semantic. 
// @todo -func (m *MerkleCompositeDAG) Merge(ctx context.Context, other core.Delta, id string) error { - return m.reg.Merge(ctx, other, id) +func (m *MerkleCompositeDAG) Merge(ctx context.Context, other core.Delta) error { + return m.reg.Merge(ctx, other) } diff --git a/merkle/crdt/lwwreg.go b/merkle/crdt/lwwreg.go index d8a831e59f..87b00e2151 100644 --- a/merkle/crdt/lwwreg.go +++ b/merkle/crdt/lwwreg.go @@ -100,6 +100,6 @@ func (mlwwreg *MerkleLWWRegister) Value(ctx context.Context) ([]byte, error) { // Merge writes the provided delta to state using a supplied // merge semantic. -func (mlwwreg *MerkleLWWRegister) Merge(ctx context.Context, other core.Delta, id string) error { - return mlwwreg.reg.Merge(ctx, other, id) +func (mlwwreg *MerkleLWWRegister) Merge(ctx context.Context, other core.Delta) error { + return mlwwreg.reg.Merge(ctx, other) } diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index a8746fe5be..89e8d0eb2e 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -53,8 +53,8 @@ func (base *baseMerkleCRDT) Clock() core.MerkleClock { return base.clock } -func (base *baseMerkleCRDT) Merge(ctx context.Context, other core.Delta, id string) error { - return base.crdt.Merge(ctx, other, id) +func (base *baseMerkleCRDT) Merge(ctx context.Context, other core.Delta) error { + return base.crdt.Merge(ctx, other) } func (base *baseMerkleCRDT) DeltaDecode(node ipld.Node) (core.Delta, error) { diff --git a/net/dag.go b/net/dag.go index d814630f6a..2d49790f90 100644 --- a/net/dag.go +++ b/net/dag.go @@ -130,7 +130,6 @@ func (p *Peer) dagWorker(jobs chan *dagJob) { job.txn, job.collection, job.dsKey, - job.node.Cid(), job.fieldName, job.node, job.nodeGetter, diff --git a/net/node_test.go b/net/node_test.go index b2ab255b20..c622be18ca 100644 --- a/net/node_test.go +++ b/net/node_test.go @@ -16,7 +16,7 @@ import ( "testing" "time" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" 
"github.com/libp2p/go-libp2p/core/event" "github.com/libp2p/go-libp2p/core/peer" ma "github.com/multiformats/go-multiaddr" @@ -24,7 +24,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/logging" diff --git a/net/process.go b/net/process.go index 35d735d6e5..a2fd446cfe 100644 --- a/net/process.go +++ b/net/process.go @@ -40,7 +40,6 @@ func (p *Peer) processLog( txn datastore.Txn, col client.Collection, dsKey core.DataStoreKey, - c cid.Cid, field string, nd ipld.Node, getter ipld.NodeGetter, @@ -62,7 +61,7 @@ func (p *Peer) processLog( ctx, "Processing PushLog request", logging.NewKV("Datastore key", dsKey), - logging.NewKV("CID", c), + logging.NewKV("CID", nd.Cid()), ) if err := txn.DAGstore().Put(ctx, nd); err != nil { @@ -70,14 +69,14 @@ func (p *Peer) processLog( } ng := p.createNodeGetter(crdt, getter) - cids, err := crdt.Clock().ProcessNode(ctx, ng, c, delta.GetPriority(), delta, nd) + cids, err := crdt.Clock().ProcessNode(ctx, ng, delta, nd) if err != nil { return nil, err } if removeChildren { // mark this obj as done - p.queuedChildren.Remove(c) + p.queuedChildren.Remove(nd.Cid()) } return cids, nil diff --git a/net/server.go b/net/server.go index ad1fd2fb29..7322d845ad 100644 --- a/net/server.go +++ b/net/server.go @@ -29,7 +29,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore/badger/v3" + "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/logging" pb "github.com/sourcenetwork/defradb/net/pb" @@ -278,7 +278,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) 
(*pb.PushL return nil, errors.Wrap("failed to decode block to ipld.Node", err) } - cids, err := s.peer.processLog(ctx, txn, col, docKey, cid, "", nd, getter, false) + cids, err := s.peer.processLog(ctx, txn, col, docKey, "", nd, getter, false) if err != nil { log.ErrorE( ctx, diff --git a/planner/filter/complex.go b/planner/filter/complex.go new file mode 100644 index 0000000000..098caefc9c --- /dev/null +++ b/planner/filter/complex.go @@ -0,0 +1,64 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// IsComplex returns true if the provided filter is complex. +// A filter is considered complex if it contains a relation +// object withing an _or operator not necessarily being +// its direct child. 
+func IsComplex(filter *mapper.Filter) bool { + if filter == nil { + return false + } + return isComplex(filter.Conditions, false) +} + +func isComplex(conditions any, seekRelation bool) bool { + switch typedCond := conditions.(type) { + case map[connor.FilterKey]any: + for k, v := range typedCond { + if op, ok := k.(*mapper.Operator); ok { + if (op.Operation == request.FilterOpOr && len(v.([]any)) > 1) || + op.Operation == request.FilterOpNot { + if isComplex(v, true) { + return true + } + continue + } + } + if _, isProp := k.(*mapper.PropertyIndex); isProp && seekRelation { + objMap := v.(map[connor.FilterKey]any) + for objK := range objMap { + if _, isRelation := objK.(*mapper.PropertyIndex); isRelation { + return true + } + } + } + if isComplex(v, seekRelation) { + return true + } + } + case []any: + for _, v := range typedCond { + if isComplex(v, seekRelation) { + return true + } + } + default: + return false + } + return false +} diff --git a/planner/filter/complex_test.go b/planner/filter/complex_test.go new file mode 100644 index 0000000000..f16055df74 --- /dev/null +++ b/planner/filter/complex_test.go @@ -0,0 +1,175 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestIsComplex(t *testing.T) { + tests := []struct { + name string + inputFilter map[string]any + isComplex bool + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + isComplex: false, + }, + { + name: "fields within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + isComplex: false, + }, + { + name: "fields within _not", + inputFilter: r("_and", + m("_not", m("name", m("_eq", "John"))), + m("age", m("_gt", 55)), + ), + isComplex: false, + }, + { + name: "fields within _or and _and (with _and root)", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + isComplex: false, + }, + { + name: "fields within _or and _and (with _or root)", + inputFilter: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + m("verified", m("_eq", true)), + ), + isComplex: false, + }, + { + name: "only 1 relation within _or", + inputFilter: r("_or", + m("published", m("rating", m("_gt", 4.0))), + ), + isComplex: false, + }, + { + name: "relation inside _or", + inputFilter: r("_or", + m("published", m("rating", m("_gt", 4.0))), + m("age", m("_gt", 30)), + m("verified", m("_eq", true)), + ), + isComplex: true, + }, + { + name: "relation not inside _or", + inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + r("_and", + m("age", m("_gt", 30)), + ), + m("name", m("_eq", "John")), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("published", m("rating", m("_gt", 4.0))), + ), + ), + isComplex: false, + }, + { + name: "relation inside _and and _or", + 
inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + r("_and", + m("published", m("rating", m("_gt", 4.0))), + m("age", m("_gt", 30)), + ), + m("name", m("_eq", "John")), + ), + ), + isComplex: true, + }, + { + name: "relation within _not", + inputFilter: m("_not", + m("published", m("rating", m("_gt", 4.0))), + ), + isComplex: true, + }, + { + name: "field inside long _or/_and/_not chain", + inputFilter: m("_not", r("_and", r("_or", m("_not", r("_or", r("_and", + m("name", m("_eq", "John")))), + )))), + isComplex: false, + }, + { + name: "relation inside _and/_or and _not", + inputFilter: r("_and", + r("_or", + m("age", m("_lt", 30)), + m("verified", m("_eq", false)), + ), + r("_or", + m("_not", + m("published", m("rating", m("_gt", 4.0))), + ), + m("name", m("_eq", "John")), + ), + ), + isComplex: true, + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actual := IsComplex(inputFilter) + assert.Equal(t, test.isComplex, actual) + }) + } +} + +func TestIsComplexNullFilter(t *testing.T) { + assert.False(t, IsComplex(nil)) +} diff --git a/planner/filter/copy.go b/planner/filter/copy.go new file mode 100644 index 0000000000..fec591f5ab --- /dev/null +++ b/planner/filter/copy.go @@ -0,0 +1,38 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/connor" +) + +// Copy performs a deep copy of the provided filter. 
+func Copy(filter map[connor.FilterKey]any) map[connor.FilterKey]any { + return copyFilterConditions(filter).(map[connor.FilterKey]any) +} + +func copyFilterConditions(conditions any) any { + switch typedCond := conditions.(type) { + case map[connor.FilterKey]any: + result := make(map[connor.FilterKey]any) + for key, clause := range typedCond { + result[key] = copyFilterConditions(clause) + } + return result + case []any: + resultArr := make([]any, len(typedCond)) + for i, elementClause := range typedCond { + resultArr[i] = copyFilterConditions(elementClause) + } + return resultArr + default: + return conditions + } +} diff --git a/planner/filter/copy_field.go b/planner/filter/copy_field.go new file mode 100644 index 0000000000..59f7db3471 --- /dev/null +++ b/planner/filter/copy_field.go @@ -0,0 +1,73 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// copyField copies the given field from the provided filter. +// The result filter preserves the structure of the original filter. 
+func copyField(filter *mapper.Filter, field mapper.Field) *mapper.Filter { + if filter == nil { + return nil + } + conditionKey := &mapper.PropertyIndex{ + Index: field.Index, + } + + resultFilter := &mapper.Filter{} + conditionMap := traverseFilterByProperty(conditionKey, filter.Conditions, false) + if len(conditionMap) > 0 { + resultFilter.Conditions = conditionMap + return resultFilter + } + return nil +} + +func traverseFilterByProperty( + key *mapper.PropertyIndex, + conditions map[connor.FilterKey]any, + shouldDelete bool, +) map[connor.FilterKey]any { + result := conditions + if !shouldDelete { + result = make(map[connor.FilterKey]any) + } + for targetKey, clause := range conditions { + if targetKey.Equal(key) { + if shouldDelete { + delete(result, targetKey) + } else { + result[key] = clause + } + } else if opKey, isOpKey := targetKey.(*mapper.Operator); isOpKey { + clauseArr, isArr := clause.([]any) + if isArr { + resultArr := make([]any, 0) + for _, elementClause := range clauseArr { + elementMap, ok := elementClause.(map[connor.FilterKey]any) + if !ok { + continue + } + compoundCond := traverseFilterByProperty(key, elementMap, shouldDelete) + if len(compoundCond) > 0 { + resultArr = append(resultArr, compoundCond) + } + } + if len(resultArr) > 0 { + result[opKey] = resultArr + } + } + } + } + return result +} diff --git a/planner/filter/copy_field_test.go b/planner/filter/copy_field_test.go new file mode 100644 index 0000000000..d3ec10cf62 --- /dev/null +++ b/planner/filter/copy_field_test.go @@ -0,0 +1,90 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" + + "github.com/stretchr/testify/assert" +) + +func TestCopyField(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter: m("age", m("_gt", 55)), + }, + { + name: "within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + m("age", m("_gt", 55)), + ), + }, + { + name: "within _or and _and", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + r("_or", + r("_and", + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("age", m("_lt", 55)), + ), + ), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actualFilter := copyField(inputFilter, test.inputField) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter.Conditions) + }) + } +} + +func TestCopyFieldOfNullFilter(t *testing.T) { + actualFilter := copyField(nil, mapper.Field{Index: 1}) + assert.Nil(t, actualFilter) +} diff --git a/planner/filter/copy_test.go b/planner/filter/copy_test.go new file mode 100644 index 0000000000..ccb471c2b6 --- /dev/null +++ b/planner/filter/copy_test.go @@ -0,0 +1,144 @@ +// Copyright 2023 Democratized 
Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestCopyFilter(t *testing.T) { + getFilter := func() map[connor.FilterKey]any { + return map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_or"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 0}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_eq"}: "Some name", + }, + }, + map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_and"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_gt"}: 64, + }, + }, + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 2}: map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_gt"}: 4.8, + }, + }, + }, + }, + }, + map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_and"}: []any{ + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_lt"}: 64, + }, + }, + map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 2}: map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 1}: map[connor.FilterKey]any{ + &mapper.Operator{Operation: "_lt"}: 4.8, + }, + }, + }, + }, + }, + }, + } + } + + getFirstArrContent := func(f map[connor.FilterKey]any) []any { + for _, val := range f { + arr, isArr := val.([]any) + if isArr { + return arr + } + } + return nil + } + + hasGtOperator := func(f map[connor.FilterKey]any) bool { + orContent := 
getFirstArrContent(f) + for _, val := range orContent { + andContent := getFirstArrContent(val.(map[connor.FilterKey]any)) + for _, andEl := range andContent { + elMap := andEl.(map[connor.FilterKey]any) + for _, v := range elMap { + vMap := v.(map[connor.FilterKey]any) + for k := range vMap { + if op, ok := k.(*mapper.Operator); ok && op.Operation == "_gt" { + return true + } + } + } + } + } + return false + } + + tests := []struct { + name string + act func(t *testing.T, original, copyFilter map[connor.FilterKey]any) + }{ + { + name: "add new value to top level", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + assert.Len(t, original, 1) + + original[&mapper.Operator{Operation: "_and"}] = []any{} + assert.Len(t, original, 2) + assert.Len(t, copyFilter, 1) + }, + }, + { + name: "change array value", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + orContent := getFirstArrContent(original) + assert.True(t, hasGtOperator(original)) + + copy(orContent[1:], orContent[2:]) + assert.False(t, hasGtOperator(original)) + assert.True(t, hasGtOperator(copyFilter)) + }, + }, + { + name: "change nested map value", + act: func(t *testing.T, original, copyFilter map[connor.FilterKey]any) { + getFirstOrEl := func(f map[connor.FilterKey]any) map[connor.FilterKey]any { + orContent := getFirstArrContent(f) + return orContent[0].(map[connor.FilterKey]any) + } + elMap := getFirstOrEl(original) + assert.Len(t, elMap, 1) + + elMap[&mapper.Operator{Operation: "_and"}] = []any{} + + assert.Len(t, getFirstOrEl(original), 2) + assert.Len(t, getFirstOrEl(copyFilter), 1) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + f := getFilter() + test.act(t, f, Copy(f)) + }) + } +} diff --git a/planner/filter/merge.go b/planner/filter/merge.go new file mode 100644 index 0000000000..3bc38f4ba3 --- /dev/null +++ b/planner/filter/merge.go @@ -0,0 +1,39 @@ +// Copyright 2023 Democratized Data Foundation +// +// 
Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// Merge merges two filters into one. +// It basically applies _and to both filters and normalizes them. +func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any { + if len(c1) == 0 { + return c2 + } + if len(c2) == 0 { + return c1 + } + + result := map[connor.FilterKey]any{ + &mapper.Operator{Operation: request.FilterOpAnd}: []any{ + c1, c2, + }, + } + // we don't use any intelligent way of merging 2 filters using + // some kind of field-by-field analysis. + // The way we merge filters is rather artificial: create a root _and operator + // and put both filters as its children. This makes the resulting filter + // more complex, that's why simplify if by normalizing it. + return normalize(result) +} diff --git a/planner/filter/merge_test.go b/planner/filter/merge_test.go new file mode 100644 index 0000000000..153c850e80 --- /dev/null +++ b/planner/filter/merge_test.go @@ -0,0 +1,75 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestMergeFilterConditions(t *testing.T) { + tests := []struct { + name string + left map[string]any + right map[string]any + expected map[string]any + }{ + { + name: "basic merge", + left: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + right: map[string]any{ + "age": m("_gt", 55), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + "age": m("_gt", 55), + }, + }, + { + name: "basic _and merge", + left: m("_and", []any{ + m("name", m("_eq", "John")), + }), + right: m("_and", []any{ + m("age", m("_gt", 55)), + }), + expected: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + }, + } + + mapping := getDocMapping() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + leftFilter := mapper.ToFilter(request.Filter{Conditions: tt.left}, mapping) + rightFilter := mapper.ToFilter(request.Filter{Conditions: tt.right}, mapping) + actualFilter := Merge(leftFilter.Conditions, rightFilter.Conditions) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter) + }) + } +} + +func TestMergeNullFilter(t *testing.T) { + f := map[connor.FilterKey]any{ + &mapper.PropertyIndex{Index: 0}: "value1", + } + AssertEqualFilterMap(t, f, Merge(f, nil)) + AssertEqualFilterMap(t, f, Merge(nil, f)) +} diff --git a/planner/filter/normalize.go b/planner/filter/normalize.go new file mode 100644 index 0000000000..5f7d275418 --- /dev/null +++ b/planner/filter/normalize.go @@ -0,0 +1,149 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// normalize normalizes the provided filter conditions. +// The following cases are subject of normalization: +// - _and or _or with one element is removed flattened +// - double _not is removed +// - any number of consecutive _ands with any number of elements is flattened +// As the result object is a map with unique keys (a.k.a. properties), +// while performing flattening of compound operators if the same property +// is present in the result map, both conditions will be moved into an _and +func normalize(conditions map[connor.FilterKey]any) map[connor.FilterKey]any { + return normalizeConditions(conditions, false).(map[connor.FilterKey]any) +} + +func conditionsArrToMap(conditions []any) map[connor.FilterKey]any { + result := make(map[connor.FilterKey]any) + for _, clause := range conditions { + if clauseMap, ok := clause.(map[connor.FilterKey]any); ok { + for k, v := range clauseMap { + result[k] = v + } + } + } + return result +} + +func addNormalizedCondition(key connor.FilterKey, val any, m map[connor.FilterKey]any) { + if _, isProp := key.(*mapper.PropertyIndex); isProp { + var andOp *mapper.Operator + var andContent []any + for existingKey := range m { + if op, isOp := existingKey.(*mapper.Operator); isOp && op.Operation == request.FilterOpAnd { + andOp = op + andContent = m[existingKey].([]any) + break + } + } + for existingKey := range m { + if existingKey.Equal(key) { + existingVal := m[existingKey] + delete(m, existingKey) + if andOp == nil { + andOp = &mapper.Operator{Operation: request.FilterOpAnd} + } + m[andOp] = append( + andContent, + 
map[connor.FilterKey]any{existingKey: existingVal}, + map[connor.FilterKey]any{key: val}, + ) + return + } + } + for _, andElement := range andContent { + elementMap := andElement.(map[connor.FilterKey]any) + for andElementKey := range elementMap { + if andElementKey.Equal(key) { + m[andOp] = append(andContent, map[connor.FilterKey]any{key: val}) + return + } + } + } + } + m[key] = val +} + +func normalizeConditions(conditions any, skipRoot bool) any { + result := make(map[connor.FilterKey]any) + switch typedConditions := conditions.(type) { + case map[connor.FilterKey]any: + for rootKey, rootVal := range typedConditions { + rootOpKey, isRootOp := rootKey.(*mapper.Operator) + if isRootOp { + if rootOpKey.Operation == request.FilterOpAnd || rootOpKey.Operation == request.FilterOpOr { + rootValArr := rootVal.([]any) + if len(rootValArr) == 1 || rootOpKey.Operation == request.FilterOpAnd && !skipRoot { + flat := normalizeConditions(conditionsArrToMap(rootValArr), false) + flatMap := flat.(map[connor.FilterKey]any) + for k, v := range flatMap { + addNormalizedCondition(k, v, result) + } + } else { + resultArr := []any{} + for i := range rootValArr { + norm := normalizeConditions(rootValArr[i], !skipRoot) + normMap, ok := norm.(map[connor.FilterKey]any) + if ok { + for k, v := range normMap { + resultArr = append(resultArr, map[connor.FilterKey]any{k: v}) + } + } else { + resultArr = append(resultArr, norm) + } + } + addNormalizedCondition(rootKey, resultArr, result) + } + } else if rootOpKey.Operation == request.FilterOpNot { + notMap := rootVal.(map[connor.FilterKey]any) + if len(notMap) == 1 { + var k connor.FilterKey + for k = range notMap { + break + } + norm := normalizeConditions(notMap, true).(map[connor.FilterKey]any) + delete(notMap, k) + var v any + for k, v = range norm { + break + } + if opKey, ok := k.(*mapper.Operator); ok && opKey.Operation == request.FilterOpNot { + notNotMap := normalizeConditions(v, false).(map[connor.FilterKey]any) + for notNotKey, 
notNotVal := range notNotMap { + addNormalizedCondition(notNotKey, notNotVal, result) + } + } else { + notMap[k] = v + addNormalizedCondition(rootOpKey, notMap, result) + } + } else { + addNormalizedCondition(rootKey, rootVal, result) + } + } else { + addNormalizedCondition(rootKey, rootVal, result) + } + } else { + addNormalizedCondition(rootKey, normalizeConditions(rootVal, false), result) + } + } + return result + case []any: + return conditionsArrToMap(typedConditions) + default: + return conditions + } +} diff --git a/planner/filter/normalize_test.go b/planner/filter/normalize_test.go new file mode 100644 index 0000000000..22e4f69ed0 --- /dev/null +++ b/planner/filter/normalize_test.go @@ -0,0 +1,302 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestNormalizeConditions(t *testing.T) { + tests := []struct { + name string + input map[string]any + expected map[string]any + }{ + { + name: "don't normalize already normalized conditions", + input: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten single _and condition", + input: r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "don't touch single _or condition", + input: r("_or", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + expected: r("_or", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + }, + { + name: "flatten _and with single condition", + input: map[string]any{ + "_and": []any{ + m("name", m("_eq", "John")), + }, + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten _or with single condition", + input: map[string]any{ + "_or": []any{ + m("name", m("_eq", "John")), + }, + "verified": m("_eq", true), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "flatten long _and/_or chain", + input: r("_or", r("_and", r("_or", r("_or", r("_and", r("_and", r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ))))))), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + }, + }, + { + name: "normalize sibling _and with few conditions", + input: map[string]any{ + "_and": []any{ + r("_and", + m("age", m("_gt", 30)), + m("published", m("rating", m("_lt", 4.8))), + ), + r("_and", 
m("verified", m("_eq", true))), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "published": m("rating", m("_lt", 4.8)), + "age": m("_gt", 30), + "verified": m("_eq", true), + }, + }, + { + name: "don't touch single _not", + input: m("_not", m("name", m("_eq", "John"))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "remove double _not", + input: m("_not", m("_not", m("name", m("_eq", "John")))), + expected: m("name", m("_eq", "John")), + }, + { + name: "remove double _not (sibling)", + input: map[string]any{ + "_not": m("_not", m("name", m("_eq", "John"))), + "age": m("_eq", 65), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "age": m("_eq", 65), + }, + }, + { + name: "don't touch double _not if first has few elements", + input: m("_not", map[string]any{ + "_not": m("name", m("_eq", "John")), + "verified": m("_eq", true), + }), + expected: m("_not", map[string]any{ + "_not": m("name", m("_eq", "John")), + "verified": m("_eq", true), + }), + }, + { + name: "normalize long _not chain", + input: m("_not", m("_not", m("_not", m("_not", m("_not", m("name", m("_eq", "John"))))))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "normalize _not content", + input: m("_not", r("_and", + m("name", m("_eq", "John")), + r("_and", + m("age", m("_eq", 30)), + m("verified", m("_eq", true)), + ), + )), + expected: m("_not", r("_and", + m("name", m("_eq", "John")), + m("age", m("_eq", 30)), + m("verified", m("_eq", true)), + )), + }, + { + name: "normalize long _not,_and,_or chain", + input: m("_not", r("_and", m("_not", r("_or", m("_not", m("name", m("_eq", "John"))))))), + expected: m("_not", m("name", m("_eq", "John"))), + }, + { + name: "normalize nested arr elements", + input: r("_and", + r("_and", r("_and", m("name", m("_eq", "John")))), + r("_and", m("verified", m("_eq", true))), + r("_and", r("_and", + r("_and", m("age", m("_lt", 55))), + m("published", m("rating", 
m("_gt", 4.4))), + )), + ), + expected: map[string]any{ + "name": m("_eq", "John"), + "verified": m("_eq", true), + "age": m("_lt", 55), + "published": m("rating", m("_gt", 4.4)), + }, + }, + { + name: "do not flatten _and, child of _or", + input: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("verified", m("_eq", false)), + ), + ), + expected: r("_or", + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + r("_and", + m("name", m("_eq", "Islam")), + m("verified", m("_eq", false)), + ), + ), + }, + { + name: "flatten _and, grand children of _or", + input: r("_or", + r("_and", + r("_and", + m("name", m("_eq", "Islam")), + m("age", m("_eq", "30")), + ), + m("verified", m("_eq", false)), + ), + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + ), + expected: r("_or", + r("_and", + m("name", m("_eq", "Islam")), + m("age", m("_eq", "30")), + m("verified", m("_eq", false)), + ), + r("_and", + m("name", m("_eq", "John")), + m("verified", m("_eq", true)), + ), + ), + }, + { + name: "squash same keys into _and", + input: map[string]any{ + "_and": []any{ + r("_and", + m("age", m("_gt", 30)), + m("published", m("rating", m("_lt", 4.8))), + ), + r("_and", m("age", m("_lt", 55))), + m("age", m("_ne", 33)), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "name": m("_eq", "John"), + "published": m("rating", m("_lt", 4.8)), + "_and": []any{ + m("age", m("_gt", 30)), + m("age", m("_lt", 55)), + m("age", m("_ne", 33)), + }, + }, + }, + { + name: "squash same keys into _and (with more matching keys)", + input: map[string]any{ + "_and": []any{ + m("published", m("rating", m("_lt", 4.8))), + r("_and", m("name", m("_ne", "Islam"))), + r("_and", + m("age", m("_gt", 30)), + m("published", m("genre", m("_eq", "Thriller"))), + m("verified", m("_eq", true)), + ), + r("_and", + m("age", m("_lt", 55)), + m("published", 
m("rating", m("_gt", 4.4)))), + }, + "name": m("_eq", "John"), + }, + expected: map[string]any{ + "_and": []any{ + m("name", m("_eq", "John")), + m("name", m("_ne", "Islam")), + m("published", m("rating", m("_gt", 4.4))), + m("published", m("rating", m("_lt", 4.8))), + m("published", m("genre", m("_eq", "Thriller"))), + m("age", m("_gt", 30)), + m("age", m("_lt", 55)), + }, + "verified": m("_eq", true), + }, + }, + } + + mapping := getDocMapping() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: tt.input}, mapping) + actualFilter := normalize(inputFilter.Conditions) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter) + }) + } +} diff --git a/planner/filter/remove_field.go b/planner/filter/remove_field.go new file mode 100644 index 0000000000..5c80ffc96c --- /dev/null +++ b/planner/filter/remove_field.go @@ -0,0 +1,26 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// RemoveField removes the given field from the provided filter. 
+func RemoveField(filter *mapper.Filter, field mapper.Field) { + if filter == nil { + return + } + conditionKey := &mapper.PropertyIndex{ + Index: field.Index, + } + + traverseFilterByProperty(conditionKey, filter.Conditions, true) +} diff --git a/planner/filter/remove_field_test.go b/planner/filter/remove_field_test.go new file mode 100644 index 0000000000..2b6e8cdd3a --- /dev/null +++ b/planner/filter/remove_field_test.go @@ -0,0 +1,87 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func TestRemoveFieldFromFilter(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter: m("name", m("_eq", "John")), + }, + { + name: "within _and", + inputFilter: r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 55)), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + m("name", m("_eq", "John")), + ), + }, + { + name: "within _or and _and", + inputFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + m("age", m("_gt", 30)), + ), + ), + r("_or", + m("name", m("_eq", "Islam")), + m("age", m("_lt", 55)), + ), + ), + inputField: mapper.Field{Index: 1}, // age + expectedFilter: r("_and", + r("_or", + r("_and", + m("name", m("_eq", "John")), + ), + ), + r("_or", + m("name", 
m("_eq", "Islam")), + ), + ), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + RemoveField(inputFilter, test.inputField) + expectedFilter := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter}, mapping) + AssertEqualFilterMap(t, expectedFilter.Conditions, inputFilter.Conditions) + }) + } +} + +func TestRemoveFieldFromNullFilter(t *testing.T) { + RemoveField(nil, mapper.Field{Index: 1}) +} diff --git a/planner/filter/split.go b/planner/filter/split.go new file mode 100644 index 0000000000..bba822145a --- /dev/null +++ b/planner/filter/split.go @@ -0,0 +1,34 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// SplitByField splits the provided filter into 2 filters based on field. +// It can be used for extracting a supType +// Eg. (filter: {age: 10, name: "bob", author: {birthday: "June 26, 1990", ...}, ...}) +// +// In this case the root filter is the conditions that apply to the main type +// ie: {age: 10, name: "bob", ...}. +// +// And the subType filter is the conditions that apply to the queried sub type +// ie: {birthday: "June 26, 1990", ...}. 
+func SplitByField(filter *mapper.Filter, field mapper.Field) (*mapper.Filter, *mapper.Filter) { + if filter == nil { + return nil, nil + } + + splitF := copyField(filter, field) + RemoveField(filter, field) + + return filter, splitF +} diff --git a/planner/filter/split_test.go b/planner/filter/split_test.go new file mode 100644 index 0000000000..1bcbecffb7 --- /dev/null +++ b/planner/filter/split_test.go @@ -0,0 +1,58 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/planner/mapper" + + "github.com/stretchr/testify/assert" +) + +func TestSplitFilter(t *testing.T) { + tests := []struct { + name string + inputField mapper.Field + inputFilter map[string]any + expectedFilter1 map[string]any + expectedFilter2 map[string]any + }{ + { + name: "flat structure", + inputFilter: map[string]any{ + "name": m("_eq", "John"), + "age": m("_gt", 55), + }, + inputField: mapper.Field{Index: 1}, // age + expectedFilter1: m("name", m("_eq", "John")), + expectedFilter2: m("age", m("_gt", 55)), + }, + } + + mapping := getDocMapping() + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + inputFilter := mapper.ToFilter(request.Filter{Conditions: test.inputFilter}, mapping) + actualFilter1, actualFilter2 := SplitByField(inputFilter, test.inputField) + expectedFilter1 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter1}, mapping) + expectedFilter2 := mapper.ToFilter(request.Filter{Conditions: test.expectedFilter2}, mapping) + AssertEqualFilterMap(t, expectedFilter1.Conditions, 
actualFilter1.Conditions) + AssertEqualFilterMap(t, expectedFilter2.Conditions, actualFilter2.Conditions) + }) + } +} + +func TestSplitNullFilter(t *testing.T) { + actualFilter1, actualFilter2 := SplitByField(nil, mapper.Field{Index: 1}) + assert.Nil(t, actualFilter1) + assert.Nil(t, actualFilter2) +} diff --git a/planner/filter/util_test.go b/planner/filter/util_test.go new file mode 100644 index 0000000000..e8860081c8 --- /dev/null +++ b/planner/filter/util_test.go @@ -0,0 +1,140 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package filter + +import ( + "fmt" + "reflect" + "testing" + + "github.com/sourcenetwork/defradb/connor" + "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +func assertEqualFilterMap(expected, actual map[connor.FilterKey]any, prefix string) string { + if len(expected) != len(actual) { + return fmt.Sprintf("Mismatch at %s: Expected map length: %d, but got: %d", prefix, len(expected), len(actual)) + } + + findMatchingKey := func(key connor.FilterKey, m map[connor.FilterKey]any) connor.FilterKey { + for k := range m { + if k.Equal(key) { + return k + } + } + return nil + } + + for expKey, expVal := range expected { + actKey := findMatchingKey(expKey, actual) + if actKey == nil { + return fmt.Sprintf("Mismatch at %s: Expected key %v not found in actual map", prefix, expKey) + } + actVal := actual[actKey] + + newPrefix := fmt.Sprintf("%s.%v", prefix, expKey) + switch expTypedVal := expVal.(type) { + case map[connor.FilterKey]any: + actTypedVal, ok := actVal.(map[connor.FilterKey]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested 
map[FilterKey]any for key %v, but got: %v", prefix, expKey, actVal) + } + errMsg := assertEqualFilterMap(expTypedVal, actTypedVal, newPrefix) + if errMsg != "" { + return errMsg + } + case []any: + actTypedVal, ok := actVal.([]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested []any for key %v, but got: %v", newPrefix, expKey, actVal) + } + if len(expTypedVal) != len(actTypedVal) { + return fmt.Sprintf("Mismatch at %s: Expected slice length: %d, but got: %d", newPrefix, len(expTypedVal), len(actTypedVal)) + } + numElements := len(expTypedVal) + for i := 0; i < numElements; i++ { + for j := 0; j < numElements; j++ { + errMsg := compareElements(expTypedVal[i], actTypedVal[j], expKey, newPrefix) + if errMsg == "" { + actTypedVal = append(actTypedVal[:j], actTypedVal[j+1:]...) + break + } + } + if len(actTypedVal) != numElements-i-1 { + return fmt.Sprintf("Mismatch at %s: Expected element not found: %d", newPrefix, expTypedVal[i]) + } + } + default: + if !reflect.DeepEqual(expVal, actVal) { + return fmt.Sprintf("Mismatch at %s: Expected value %v for key %v, but got %v", prefix, expVal, expKey, actVal) + } + } + } + return "" +} + +func compareElements(expected, actual any, key connor.FilterKey, prefix string) string { + switch expElem := expected.(type) { + case map[connor.FilterKey]any: + actElem, ok := actual.(map[connor.FilterKey]any) + if !ok { + return fmt.Sprintf("Mismatch at %s: Expected a nested map[FilterKey]any for key %v, but got: %v", prefix, key, actual) + } + return assertEqualFilterMap(expElem, actElem, prefix) + default: + if !reflect.DeepEqual(expElem, actual) { + return fmt.Sprintf("Mismatch at %s: Expected value %v for key %v, but got %v", prefix, expElem, key, actual) + } + } + return "" +} + +func AssertEqualFilterMap(t *testing.T, expected, actual map[connor.FilterKey]any) { + errMsg := assertEqualFilterMap(expected, actual, "root") + if errMsg != "" { + t.Fatal(errMsg) + } +} + +func AssertEqualFilter(t *testing.T, expected, 
actual *mapper.Filter) { + if expected == nil && actual == nil { + return + } + + if expected == nil || actual == nil { + t.Fatalf("Expected %v, but got %v", expected, actual) + return + } + + AssertEqualFilterMap(t, expected.Conditions, actual.Conditions) + + if !reflect.DeepEqual(expected.ExternalConditions, actual.ExternalConditions) { + t.Errorf("Expected external conditions \n\t%v\n, but got \n\t%v", + expected.ExternalConditions, actual.ExternalConditions) + } +} + +func m(op string, val any) map[string]any { + return map[string]any{op: val} +} + +func r(op string, vals ...any) map[string]any { + return m(op, vals) +} + +func getDocMapping() *core.DocumentMapping { + return &core.DocumentMapping{ + IndexesByName: map[string][]int{"name": {0}, "age": {1}, "published": {2}, "verified": {3}}, + ChildMappings: []*core.DocumentMapping{nil, nil, { + IndexesByName: map[string][]int{"rating": {11}, "genre": {12}}, + }}, + } +} diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 5b823f6ec2..b6f80a55a2 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -16,7 +16,6 @@ import ( "strings" "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -93,6 +92,11 @@ func toSelect( return nil, err } + fields, err = resolveSecondaryRelationIDs(descriptionsRepo, desc, mapping, fields) + if err != nil { + return nil, err + } + // Resolve groupBy mappings i.e. alias remapping and handle missed inner group. 
if selectRequest.GroupBy.HasValue() { groupByFields := selectRequest.GroupBy.Value().Fields @@ -783,6 +787,7 @@ func resolveFilterDependencies( source.Value().Conditions, mapping, existingFields, + nil, ) } @@ -792,77 +797,83 @@ func resolveInnerFilterDependencies( source map[string]any, mapping *core.DocumentMapping, existingFields []Requestable, + resolvedFields []Requestable, ) ([]Requestable, error) { newFields := []Requestable{} -sourceLoop: for key := range source { - if strings.HasPrefix(key, "_") && key != request.KeyFieldName { - continue - } - - propertyMapped := len(mapping.IndexesByName[key]) != 0 - - if !propertyMapped { - index := mapping.GetNextIndex() - - dummyParsed := &request.Select{ - Field: request.Field{ - Name: key, - }, - } + if key == request.FilterOpAnd || key == request.FilterOpOr { + compoundFilter := source[key].([]any) + for _, innerFilter := range compoundFilter { + innerFields, err := resolveInnerFilterDependencies( + descriptionsRepo, + parentCollectionName, + innerFilter.(map[string]any), + mapping, + existingFields, + resolvedFields, + ) + if err != nil { + return nil, err + } - childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, parentCollectionName) - if err != nil { - return nil, err + resolvedFields = append(resolvedFields, innerFields...) + newFields = append(newFields, innerFields...) 
} - - childMapping, _, err := getTopLevelInfo(descriptionsRepo, dummyParsed, childCollectionName) + continue + } else if key == request.FilterOpNot { + notFilter := source[key].(map[string]any) + innerFields, err := resolveInnerFilterDependencies( + descriptionsRepo, + parentCollectionName, + notFilter, + mapping, + existingFields, + resolvedFields, + ) if err != nil { return nil, err } - childMapping = childMapping.CloneWithoutRender() - mapping.SetChildAt(index, childMapping) - - dummyJoin := &Select{ - Targetable: Targetable{ - Field: Field{ - Index: index, - Name: key, - }, - }, - CollectionName: childCollectionName, - DocumentMapping: childMapping, - } - newFields = append(newFields, dummyJoin) - mapping.Add(index, key) + resolvedFields = append(resolvedFields, innerFields...) + newFields = append(newFields, innerFields...) + continue } - keyIndex := mapping.FirstIndexOfName(key) + propertyMapped := len(mapping.IndexesByName[key]) != 0 - if keyIndex >= len(mapping.ChildMappings) { - // If the key index is outside the bounds of the child mapping array, then - // this is not a relation/join and we can add it to the fields and - // continue (no child props to process) - for _, field := range existingFields { - if field.GetIndex() == keyIndex { - continue sourceLoop + var childSelect *Select + if propertyMapped { + var field Requestable + for _, f := range existingFields { + if f.GetIndex() == mapping.FirstIndexOfName(key) { + field = f + break } } - newFields = append(existingFields, &Field{ - Index: keyIndex, - Name: key, - }) - - continue - } + for _, f := range resolvedFields { + if f.GetIndex() == mapping.FirstIndexOfName(key) { + field = f + break + } + } + if field == nil { + newFields = append(newFields, &Field{Index: mapping.FirstIndexOfName(key), Name: key}) + continue + } + var isSelect bool + childSelect, isSelect = field.(*Select) + if !isSelect { + continue + } + } else { + var err error + childSelect, err = constructEmptyJoin(descriptionsRepo, 
parentCollectionName, mapping, key) + if err != nil { + return nil, err + } - childMap := mapping.ChildMappings[keyIndex] - if childMap == nil { - // If childMap is nil, then this is not a relation/join and we can continue - // (no child props to process) - continue + newFields = append(newFields, childSelect) } childSource := source[key] @@ -873,61 +884,155 @@ sourceLoop: continue } - dummyParsed := &request.Select{ - Field: request.Field{ - Name: key, - }, - } - + dummyParsed := &request.Select{Field: request.Field{Name: key}} childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, parentCollectionName) if err != nil { return nil, err } - allFields := enumerable.Concat( - enumerable.New(newFields), - enumerable.New(existingFields), - ) - - matchingFields := enumerable.Where[Requestable](allFields, func(existingField Requestable) (bool, error) { - return existingField.GetIndex() == keyIndex, nil - }) - - matchingHosts := enumerable.Select(matchingFields, func(existingField Requestable) (*Select, error) { - host, isSelect := existingField.AsSelect() - if !isSelect { - // This should never be possible - return nil, client.NewErrUnhandledType("host", existingField) - } - return host, nil - }) - - host, hasHost, err := enumerable.TryGetFirst(matchingHosts) - if err != nil { - return nil, err - } - if !hasHost { - // This should never be possible - return nil, ErrFailedToFindHostField - } - childFields, err := resolveInnerFilterDependencies( descriptionsRepo, childCollectionName, childFilter, - childMap, - host.Fields, + childSelect.DocumentMapping, + childSelect.Fields, + nil, ) if err != nil { return nil, err } - host.Fields = append(host.Fields, childFields...) + childSelect.Fields = append(childSelect.Fields, childFields...) } return newFields, nil } +// constructEmptyJoin constructs a valid empty join with no requested fields. 
+func constructEmptyJoin( + descriptionsRepo *DescriptionsRepo, + parentCollectionName string, + parentMapping *core.DocumentMapping, + name string, +) (*Select, error) { + index := parentMapping.GetNextIndex() + + dummyParsed := &request.Select{ + Field: request.Field{ + Name: name, + }, + } + + childCollectionName, err := getCollectionName(descriptionsRepo, dummyParsed, parentCollectionName) + if err != nil { + return nil, err + } + + childMapping, _, err := getTopLevelInfo(descriptionsRepo, dummyParsed, childCollectionName) + if err != nil { + return nil, err + } + childMapping = childMapping.CloneWithoutRender() + parentMapping.SetChildAt(index, childMapping) + parentMapping.Add(index, name) + + return &Select{ + Targetable: Targetable{ + Field: Field{ + Index: index, + Name: name, + }, + }, + CollectionName: childCollectionName, + DocumentMapping: childMapping, + }, nil +} + +// resolveSecondaryRelationIDs constructs the required stuff needed to resolve secondary relation ids. +// +// They are handled by joining (if not already done so) the related object and copying its key into the +// secondary relation id field. +// +// They copying itself is handled within [typeJoinOne]. 
+func resolveSecondaryRelationIDs( + descriptionsRepo *DescriptionsRepo, + desc *client.CollectionDescription, + mapping *core.DocumentMapping, + requestables []Requestable, +) ([]Requestable, error) { + fields := requestables + + for _, requestable := range requestables { + existingField, isField := requestable.(*Field) + if !isField { + continue + } + + fieldDesc, descFound := desc.Schema.GetField(existingField.Name) + if !descFound { + continue + } + + if !fieldDesc.RelationType.IsSet(client.Relation_Type_INTERNAL_ID) { + continue + } + + objectFieldDesc, descFound := desc.Schema.GetField( + strings.TrimSuffix(existingField.Name, request.RelatedObjectID), + ) + if !descFound { + continue + } + + if objectFieldDesc.RelationName == "" { + continue + } + + var siblingFound bool + for _, siblingRequestable := range requestables { + siblingSelect, isSelect := siblingRequestable.(*Select) + if !isSelect { + continue + } + + siblingFieldDesc, descFound := desc.Schema.GetField(siblingSelect.Field.Name) + if !descFound { + continue + } + + if siblingFieldDesc.RelationName != objectFieldDesc.RelationName { + continue + } + + if siblingFieldDesc.Kind != client.FieldKind_FOREIGN_OBJECT { + continue + } + + siblingFound = true + break + } + + if !siblingFound { + objectFieldName := strings.TrimSuffix(existingField.Name, request.RelatedObjectID) + + // We only require the dockey of the related object, so an empty join is all we need. + join, err := constructEmptyJoin( + descriptionsRepo, + desc.Name, + mapping, + objectFieldName, + ) + if err != nil { + return nil, err + } + + fields = append(fields, join) + } + } + + return fields, nil +} + // ToCommitSelect converts the given [request.CommitSelect] into a [CommitSelect]. 
// // In the process of doing so it will construct the document map required to access the data diff --git a/planner/mapper/targetable.go b/planner/mapper/targetable.go index 49190b911f..bcfdb02ef8 100644 --- a/planner/mapper/targetable.go +++ b/planner/mapper/targetable.go @@ -13,6 +13,7 @@ package mapper import ( "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/connor" "github.com/sourcenetwork/defradb/core" ) @@ -109,7 +110,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a case *Operator: switch keyType.Operation { - case "_and", "_or": + case request.FilterOpAnd, request.FilterOpOr: v := v.([]any) logicMapEntries := make([]any, len(v)) for i, item := range v { @@ -117,7 +118,7 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a logicMapEntries[i] = filterObjectToMap(mapping, itemMap) } outmap[keyType.Operation] = logicMapEntries - case "_not": + case request.FilterOpNot: itemMap := v.(map[connor.FilterKey]any) outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap) default: diff --git a/planner/planner.go b/planner/planner.go index 3af7b745e7..bcb0653633 100644 --- a/planner/planner.go +++ b/planner/planner.go @@ -308,16 +308,20 @@ func (p *Planner) expandTypeIndexJoinPlan(plan *typeIndexJoin, parentPlan *selec func (p *Planner) expandGroupNodePlan(topNodeSelect *selectTopNode) error { var sourceNode planNode - var hasScanNode bool - // Find the first scan node in the topNodeSelect, we assume that it will be for the correct collection. - // This may be a commit node. 
- sourceNode, hasScanNode = walkAndFindPlanType[*scanNode](topNodeSelect.planNode) - if !hasScanNode { - commitNode, hasCommitNode := walkAndFindPlanType[*dagScanNode](topNodeSelect.planNode) - if !hasCommitNode { - return ErrFailedToFindGroupSource + var hasJoinNode bool + // Find the first join, scan, or commit node in the topNodeSelect, + // we assume that it will be for the correct collection. + sourceNode, hasJoinNode = walkAndFindPlanType[*typeIndexJoin](topNodeSelect.planNode) + if !hasJoinNode { + var hasScanNode bool + sourceNode, hasScanNode = walkAndFindPlanType[*scanNode](topNodeSelect.planNode) + if !hasScanNode { + commitNode, hasCommitNode := walkAndFindPlanType[*dagScanNode](topNodeSelect.planNode) + if !hasCommitNode { + return ErrFailedToFindGroupSource + } + sourceNode = commitNode } - sourceNode = commitNode } // Check for any existing pipe nodes in the topNodeSelect, we should use it if there is one diff --git a/planner/scan.go b/planner/scan.go index 43bf47e27a..256711b34e 100644 --- a/planner/scan.go +++ b/planner/scan.go @@ -39,7 +39,6 @@ type scanNode struct { desc client.CollectionDescription fields []client.FieldDescription - docKey []byte showDeleted bool @@ -164,18 +163,21 @@ func (n *scanNode) Next() (bool, error) { return false, nil } - var err error - var execInfo fetcher.ExecInfo - n.docKey, n.currentValue, execInfo, err = n.fetcher.FetchNextDoc(n.p.ctx, n.documentMapping) + doc, execInfo, err := n.fetcher.FetchNext(n.p.ctx) if err != nil { return false, err } n.execInfo.fetches.Add(execInfo) - if len(n.currentValue.Fields) == 0 { + if doc == nil { return false, nil } + n.currentValue, err = fetcher.DecodeToDoc(doc, n.documentMapping, false) + if err != nil { + return false, err + } + n.documentMapping.SetFirstOfName( &n.currentValue, request.DeletedFieldName, diff --git a/planner/type_join.go b/planner/type_join.go index 1bab12b60f..f37437089e 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -11,11 +11,14 @@ package 
planner import ( + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/connor" "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/db/base" + "github.com/sourcenetwork/defradb/planner/filter" "github.com/sourcenetwork/defradb/planner/mapper" "github.com/sourcenetwork/defradb/request/graphql/schema" ) @@ -208,39 +211,6 @@ func (n *typeIndexJoin) Explain(explainType request.ExplainType) (map[string]any // Merge implements mergeNode func (n *typeIndexJoin) Merge() bool { return true } -// split the provided filter -// into the root and subType components. -// Eg. (filter: {age: 10, name: "bob", author: {birthday: "June 26, 1990", ...}, ...}) -// -// The root filter is the conditions that apply to the main -// type ie: {age: 10, name: "bob", ...}. -// -// The subType filter is the conditions that apply to the -// queried sub type ie: {birthday: "June 26, 1990", ...}. -func splitFilterByType(filter *mapper.Filter, subType int) (*mapper.Filter, *mapper.Filter) { - if filter == nil { - return nil, nil - } - conditionKey := &mapper.PropertyIndex{ - Index: subType, - } - - keyFound, sub := removeConditionIndex(conditionKey, filter.Conditions) - if !keyFound { - return filter, nil - } - - // create new splitup filter - // our schema ensures that if sub exists, its of type map[string]any - splitF := &mapper.Filter{Conditions: map[connor.FilterKey]any{conditionKey: sub}} - - // check if we have any remaining filters - if len(filter.Conditions) == 0 { - return nil, splitF - } - return filter, splitF -} - // typeJoinOne is the plan node for a type index join // where the root type is the primary in a one-to-one relation request. 
type typeJoinOne struct { @@ -255,7 +225,8 @@ type typeJoinOne struct { subTypeName string subTypeFieldName string - primary bool + primary bool + secondaryFieldIndex immutable.Option[int] spans core.Spans subSelect *mapper.Select @@ -266,19 +237,7 @@ func (p *Planner) makeTypeJoinOne( source planNode, subType *mapper.Select, ) (*typeJoinOne, error) { - // split filter - if scan, ok := source.(*scanNode); ok { - var parentfilter *mapper.Filter - scan.filter, parentfilter = splitFilterByType(scan.filter, subType.Index) - if parentfilter != nil { - if parent.filter == nil { - parent.filter = new(mapper.Filter) - } - parent.filter.Conditions = mergeFilterConditions( - parent.filter.Conditions, parentfilter.Conditions) - } - subType.ShowDeleted = parent.selectReq.ShowDeleted - } + prepareScanNodeFilterForTypeJoin(parent, source, subType) selectPlan, err := p.SubSelect(subType) if err != nil { @@ -293,7 +252,7 @@ func (p *Planner) makeTypeJoinOne( // determine relation direction (primary or secondary?) 
// check if the field we're querying is the primary side of the relation - isPrimary := subTypeFieldDesc.RelationType&client.Relation_Type_Primary > 0 + isPrimary := subTypeFieldDesc.RelationType.IsSet(client.Relation_Type_Primary) subTypeCollectionDesc, err := p.getCollectionDesc(subType.CollectionName) if err != nil { @@ -305,15 +264,24 @@ func (p *Planner) makeTypeJoinOne( return nil, client.NewErrFieldNotExist(subTypeFieldDesc.RelationName) } + var secondaryFieldIndex immutable.Option[int] + if !isPrimary { + idFieldName := subTypeFieldDesc.Name + request.RelatedObjectID + secondaryFieldIndex = immutable.Some( + parent.documentMapping.FirstIndexOfName(idFieldName), + ) + } + return &typeJoinOne{ - p: p, - root: source, - subSelect: subType, - subTypeName: subType.Name, - subTypeFieldName: subTypeField.Name, - subType: selectPlan, - primary: isPrimary, - docMapper: docMapper{parent.documentMapping}, + p: p, + root: source, + subSelect: subType, + subTypeName: subType.Name, + subTypeFieldName: subTypeField.Name, + subType: selectPlan, + primary: isPrimary, + secondaryFieldIndex: secondaryFieldIndex, + docMapper: docMapper{parent.documentMapping}, }, nil } @@ -360,20 +328,9 @@ func (n *typeJoinOne) Next() (bool, error) { } func (n *typeJoinOne) valuesSecondary(doc core.Doc) (core.Doc, error) { - fkIndex := &mapper.PropertyIndex{ - Index: n.subType.DocumentMap().FirstIndexOfName(n.subTypeFieldName + request.RelatedObjectID), - } - filter := map[connor.FilterKey]any{ - fkIndex: map[connor.FilterKey]any{ - mapper.FilterEqOp: doc.GetKey(), - }, - } - + propIndex := n.subType.DocumentMap().FirstIndexOfName(n.subTypeFieldName + request.RelatedObjectID) // using the doc._key as a filter - err := appendFilterToScanNode(n.subType, filter) - if err != nil { - return core.Doc{}, err - } + setSubTypeFilterToScanNode(n.subType, propIndex, doc.GetKey()) // We have to reset the scan node after appending the new key-filter if err := n.subType.Init(); err != nil { @@ -385,8 
+342,13 @@ func (n *typeJoinOne) valuesSecondary(doc core.Doc) (core.Doc, error) { return doc, err } - subdoc := n.subType.Value() - doc.Fields[n.subSelect.Index] = subdoc + subDoc := n.subType.Value() + doc.Fields[n.subSelect.Index] = subDoc + + if n.secondaryFieldIndex.HasValue() { + doc.Fields[n.secondaryFieldIndex.Value()] = subDoc.GetKey() + } + return doc, nil } @@ -417,7 +379,7 @@ func (n *typeJoinOne) valuesPrimary(doc core.Doc) (core.Doc, error) { // if we don't find any docs from our point span lookup // or if we encounter an error just return the base doc, - // with an empty map for the subdoc + // with an empty map for the subDoc next, err := n.subType.Next() if err != nil { @@ -462,24 +424,47 @@ type typeJoinMany struct { subSelect *mapper.Select } -func (p *Planner) makeTypeJoinMany( +func prepareScanNodeFilterForTypeJoin( parent *selectNode, source planNode, subType *mapper.Select, -) (*typeJoinMany, error) { - // split filter - if scan, ok := source.(*scanNode); ok { - var parentfilter *mapper.Filter - scan.filter, parentfilter = splitFilterByType(scan.filter, subType.Index) - if parentfilter != nil { +) { + subType.ShowDeleted = parent.selectReq.ShowDeleted + + scan, ok := source.(*scanNode) + if !ok || scan.filter == nil { + return + } + + if filter.IsComplex(scan.filter) { + if parent.filter == nil { + parent.filter = mapper.NewFilter() + parent.filter.Conditions = filter.Copy(scan.filter.Conditions) + } else { + parent.filter.Conditions = filter.Merge( + parent.filter.Conditions, scan.filter.Conditions) + } + filter.RemoveField(scan.filter, subType.Field) + } else { + var parentFilter *mapper.Filter + scan.filter, parentFilter = filter.SplitByField(scan.filter, subType.Field) + if parentFilter != nil { if parent.filter == nil { - parent.filter = new(mapper.Filter) + parent.filter = parentFilter + } else { + parent.filter.Conditions = filter.Merge( + parent.filter.Conditions, parentFilter.Conditions) } - parent.filter.Conditions = 
mergeFilterConditions( - parent.filter.Conditions, parentfilter.Conditions) } - subType.ShowDeleted = parent.selectReq.ShowDeleted } +} + +func (p *Planner) makeTypeJoinMany( + parent *selectNode, + source planNode, + subType *mapper.Select, +) (*typeJoinMany, error) { + prepareScanNodeFilterForTypeJoin(parent, source, subType) selectPlan, err := p.SubSelect(subType) if err != nil { @@ -543,26 +528,15 @@ func (n *typeJoinMany) Next() (bool, error) { n.currentValue = n.root.Value() // check if theres an index - // if there is, scan and aggregate resuts + // if there is, scan and aggregate results // if not, then manually scan the subtype table - subdocs := make([]core.Doc, 0) + subDocs := make([]core.Doc, 0) if n.index != nil { // @todo: handle index for one-to-many setup } else { - fkIndex := &mapper.PropertyIndex{ - Index: n.subSelect.FirstIndexOfName(n.rootName + request.RelatedObjectID), - } - filter := map[connor.FilterKey]any{ - fkIndex: map[connor.FilterKey]any{ - mapper.FilterEqOp: n.currentValue.GetKey(), - }, - } - + propIndex := n.subSelect.FirstIndexOfName(n.rootName + request.RelatedObjectID) // using the doc._key as a filter - err := appendFilterToScanNode(n.subType, filter) - if err != nil { - return false, err - } + setSubTypeFilterToScanNode(n.subType, propIndex, n.currentValue.GetKey()) // reset scan node if err := n.subType.Init(); err != nil { @@ -578,12 +552,12 @@ func (n *typeJoinMany) Next() (bool, error) { break } - subdoc := n.subType.Value() - subdocs = append(subdocs, subdoc) + subDoc := n.subType.Value() + subDocs = append(subDocs, subDoc) } } - n.currentValue.Fields[n.subSelect.Index] = subdocs + n.currentValue.Fields[n.subSelect.Index] = subDocs return true, nil } @@ -597,53 +571,35 @@ func (n *typeJoinMany) Close() error { func (n *typeJoinMany) Source() planNode { return n.root } -func appendFilterToScanNode(plan planNode, filterCondition map[connor.FilterKey]any) error { - switch node := plan.(type) { - case *scanNode: - filter := 
node.filter - if filter == nil && len(filterCondition) > 0 { - filter = mapper.NewFilter() - } - - filter.Conditions = mergeFilterConditions(filter.Conditions, filterCondition) - - node.filter = filter - case nil: - return nil - default: - return appendFilterToScanNode(node.Source(), filterCondition) +func setSubTypeFilterToScanNode(plan planNode, propIndex int, key string) { + scan := getScanNode(plan) + if scan == nil { + return } - return nil -} -// merge into dest with src, return dest -func mergeFilterConditions(dest map[connor.FilterKey]any, src map[connor.FilterKey]any) map[connor.FilterKey]any { - if dest == nil { - dest = make(map[connor.FilterKey]any) + if scan.filter == nil { + scan.filter = mapper.NewFilter() } - // merge filter conditions - for k, v := range src { - indexKey, isIndexKey := k.(*mapper.PropertyIndex) - if !isIndexKey { - continue - } - removeConditionIndex(indexKey, dest) - dest[k] = v + + propertyIndex := &mapper.PropertyIndex{Index: propIndex} + filterConditions := map[connor.FilterKey]any{ + propertyIndex: map[connor.FilterKey]any{ + mapper.FilterEqOp: key, + }, } - return dest + + filter.RemoveField(scan.filter, mapper.Field{Index: propIndex}) + scan.filter.Conditions = filter.Merge(scan.filter.Conditions, filterConditions) } -func removeConditionIndex( - key *mapper.PropertyIndex, - filterConditions map[connor.FilterKey]any, -) (bool, any) { - for targetKey, clause := range filterConditions { - if indexKey, isIndexKey := targetKey.(*mapper.PropertyIndex); isIndexKey { - if key.Index == indexKey.Index { - delete(filterConditions, targetKey) - return true, clause - } +func getScanNode(plan planNode) *scanNode { + node := plan + for node != nil { + scanNode, ok := node.(*scanNode) + if ok { + return scanNode } + node = node.Source() } - return false, nil + return nil } diff --git a/playground/package-lock.json b/playground/package-lock.json index 9eb027211f..952d577128 100644 --- a/playground/package-lock.json +++ 
b/playground/package-lock.json @@ -8,25 +8,25 @@ "name": "playground", "version": "0.0.0", "dependencies": { - "@tanstack/react-query": "^4.32.0", + "@tanstack/react-query": "^4.35.3", "fast-json-patch": "^3.1.1", - "graphiql": "^3.0.4", - "graphql": "^16.7.1", + "graphiql": "^3.0.5", + "graphql": "^16.8.0", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-hook-form": "^7.45.2" + "react-hook-form": "^7.46.1" }, "devDependencies": { - "@types/react": "^18.2.15", + "@types/react": "^18.2.21", "@types/react-dom": "^18.2.7", - "@typescript-eslint/eslint-plugin": "^5.59.0", - "@typescript-eslint/parser": "^5.62.0", + "@typescript-eslint/eslint-plugin": "^6.7.0", + "@typescript-eslint/parser": "^6.7.0", "@vitejs/plugin-react-swc": "^3.0.0", - "eslint": "^8.45.0", + "eslint": "^8.49.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.3.4", - "typescript": "^5.0.2", - "vite": "^4.3.9" + "eslint-plugin-react-refresh": "^0.4.3", + "typescript": "^5.2.2", + "vite": "^4.4.9" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -70,9 +70,9 @@ "peer": true }, "node_modules/@codemirror/view": { - "version": "6.15.3", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.15.3.tgz", - "integrity": "sha512-chNgR8H7Ipx7AZUt0+Kknk7BCow/ron3mHd1VZdM7hQXiI79+UlWqcxpCiexTxZQ+iSkqndk3HHAclJOcjSuog==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.16.0.tgz", + "integrity": "sha512-1Z2HkvkC3KR/oEZVuW9Ivmp8TWLzGEd8T8TA04TTwPvqogfkHBdYSlflytDOqmkUxM2d1ywTg7X2dU5mC+SXvg==", "peer": true, "dependencies": { "@codemirror/state": "^6.1.4", @@ -95,10 +95,58 @@ "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", "optional": true }, + "node_modules/@esbuild/android-arm": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.17.tgz", + "integrity": 
"sha512-wHsmJG/dnL3OkpAcwbgoBTTMHVi4Uyou3F5mf58ZtmUyIKfcdA7TROav/6tCzET4A3QW2Q2FC+eFneMU+iyOxg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.17.tgz", + "integrity": "sha512-9np+YYdNDed5+Jgr1TdWBsozZ85U1Oa3xW0c7TWqH0y2aGghXtZsuT8nYRbzOMcl0bXZXjOGbksoTtVOlWrRZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.17.tgz", + "integrity": "sha512-O+FeWB/+xya0aLg23hHEM2E3hbfwZzjqumKMSIqcHbNvDa+dza2D0yLuymRBQQnC34CWrsJUXyH2MG5VnLd6uw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz", - "integrity": "sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==", + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.17.tgz", + "integrity": "sha512-M9uJ9VSB1oli2BE/dJs3zVr9kcCBBsE883prage1NWz6pBS++1oNn/7soPNS3+1DGj0FrkSvnED4Bmlu1VAE9g==", "cpu": [ "arm64" ], @@ -111,6 +159,294 @@ "node": ">=12" } }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.17.tgz", + "integrity": "sha512-XDre+J5YeIJDMfp3n0279DFNrGCXlxOuGsWIkRb1NThMZ0BsrWXoTg23Jer7fEXQ9Ye5QjrvXpxnhzl3bHtk0g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/freebsd-arm64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.17.tgz", + "integrity": "sha512-cjTzGa3QlNfERa0+ptykyxs5A6FEUQQF0MuilYXYBGdBxD3vxJcKnzDlhDCa1VAJCmAxed6mYhA2KaJIbtiNuQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.17.tgz", + "integrity": "sha512-sOxEvR8d7V7Kw8QqzxWc7bFfnWnGdaFBut1dRUYtu+EIRXefBc/eIsiUiShnW0hM3FmQ5Zf27suDuHsKgZ5QrA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.17.tgz", + "integrity": "sha512-2d3Lw6wkwgSLC2fIvXKoMNGVaeY8qdN0IC3rfuVxJp89CRfA3e3VqWifGDfuakPmp90+ZirmTfye1n4ncjv2lg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.17.tgz", + "integrity": "sha512-c9w3tE7qA3CYWjT+M3BMbwMt+0JYOp3vCMKgVBrCl1nwjAlOMYzEo+gG7QaZ9AtqZFj5MbUc885wuBBmu6aADQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.17.tgz", + "integrity": "sha512-1DS9F966pn5pPnqXYz16dQqWIB0dmDfAQZd6jSSpiT9eX1NzKh07J6VKR3AoXXXEk6CqZMojiVDSZi1SlmKVdg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/linux-loong64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.17.tgz", + "integrity": "sha512-EvLsxCk6ZF0fpCB6w6eOI2Fc8KW5N6sHlIovNe8uOFObL2O+Mr0bflPHyHwLT6rwMg9r77WOAWb2FqCQrVnwFg==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.17.tgz", + "integrity": "sha512-e0bIdHA5p6l+lwqTE36NAW5hHtw2tNRmHlGBygZC14QObsA3bD4C6sXLJjvnDIjSKhW1/0S3eDy+QmX/uZWEYQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.17.tgz", + "integrity": "sha512-BAAilJ0M5O2uMxHYGjFKn4nJKF6fNCdP1E0o5t5fvMYYzeIqy2JdAP88Az5LHt9qBoUa4tDaRpfWt21ep5/WqQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.17.tgz", + "integrity": "sha512-Wh/HW2MPnC3b8BqRSIme/9Zhab36PPH+3zam5pqGRH4pE+4xTrVLx2+XdGp6fVS3L2x+DrsIcsbMleex8fbE6g==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.17.tgz", + "integrity": "sha512-j/34jAl3ul3PNcK3pfI0NSlBANduT2UO5kZ7FCaK33XFv3chDhICLY8wJJWIhiQ+YNdQ9dxqQctRg2bvrMlYgg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": 
">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.17.tgz", + "integrity": "sha512-QM50vJ/y+8I60qEmFxMoxIx4de03pGo2HwxdBeFd4nMh364X6TIBZ6VQ5UQmPbQWUVWHWws5MmJXlHAXvJEmpQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.17.tgz", + "integrity": "sha512-/jGlhWR7Sj9JPZHzXyyMZ1RFMkNPjC6QIAan0sDOtIo2TYk3tZn5UDrkE0XgsTQCxWTTOcMPf9p6Rh2hXtl5TQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.17.tgz", + "integrity": "sha512-rSEeYaGgyGGf4qZM2NonMhMOP/5EHp4u9ehFiBrg7stH6BYEEjlkVREuDEcQ0LfIl53OXLxNbfuIj7mr5m29TA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.17.tgz", + "integrity": "sha512-Y7ZBbkLqlSgn4+zot4KUNYst0bFoO68tRgI6mY2FIM+b7ZbyNVtNbDP5y8qlu4/knZZ73fgJDlXID+ohY5zt5g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.17.tgz", + "integrity": "sha512-bwPmTJsEQcbZk26oYpc4c/8PvTY3J5/QK8jM19DVlEsAB41M39aWovWoHtNm78sd6ip6prilxeHosPADXtEJFw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/win32-ia32": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.17.tgz", + "integrity": "sha512-H/XaPtPKli2MhW+3CQueo6Ni3Avggi6hP/YvgkEe1aSaxw+AeO8MFjq8DlgfTd9Iz4Yih3QCZI6YLMoyccnPRg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.17.tgz", + "integrity": "sha512-fGEb8f2BSA3CW7riJVurug65ACLuQAzKq0SSqkY2b2yHHH0MzDfbLyKIGzHwOI/gkHcxM/leuSW6D5w/LMNitA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", @@ -127,18 +463,18 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", - "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.6.2.tgz", + "integrity": "sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw==", "dev": true, "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", - "integrity": "sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", + "integrity": 
"sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -159,25 +495,29 @@ } }, "node_modules/@eslint/js": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", - "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.49.0.tgz", + "integrity": "sha512-1S8uAY/MTJqVx0SC4epBq+N2yhuwtNwLbJYNZyhL2pO1ZVKn5HFXav5T41Ryzy9K9V7ZId2JB2oy/W4aCd9/2w==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/@floating-ui/core": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.3.1.tgz", - "integrity": "sha512-Bu+AMaXNjrpjh41znzHqaz3r2Nr8hHuHZT6V2LBKMhyMl0FgKA62PNYbqnfgmzOhoWZj70Zecisbo4H1rotP5g==" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.4.1.tgz", + "integrity": "sha512-jk3WqquEJRlcyu7997NtR5PibI+y5bi+LS3hPmguVClypenMsCY3CBa3LAQnozRCtCrYWSEtAdiskpamuJRFOQ==", + "dependencies": { + "@floating-ui/utils": "^0.1.1" + } }, "node_modules/@floating-ui/dom": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.4.5.tgz", - "integrity": "sha512-96KnRWkRnuBSSFbj0sFGwwOUd8EkiecINVl0O9wiZlZ64EkpyAOG3Xc2vKKNJmru0Z7RqWNymA+6b8OZqjgyyw==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.1.tgz", + "integrity": "sha512-KwvVcPSXg6mQygvA1TjbN/gh///36kKtllIF8SUm0qpFj8+rvYrpvlYdL1JoA71SHpDqgSSdGOSoQ0Mp3uY5aw==", "dependencies": { - "@floating-ui/core": "^1.3.1" + "@floating-ui/core": "^1.4.1", + "@floating-ui/utils": "^0.1.1" } }, "node_modules/@floating-ui/react-dom": { @@ -192,10 +532,15 @@ "react-dom": ">=16.8.0" } }, + "node_modules/@floating-ui/utils": { + "version": "0.1.1", + "resolved": 
"https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.1.tgz", + "integrity": "sha512-m0G6wlnhm/AX0H12IOWtK8gASEMffnX08RtKkCgTdHb9JpHKGloI7icFfLg9ZmQeavcvR0PKmzxClyuFPSjKWw==" + }, "node_modules/@graphiql/react": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.19.2.tgz", - "integrity": "sha512-xdcLLUHr15AUxtv9Jyw7Mlf6Vd9EJb8ULImHTJOzDgW7DNAGUdU6Yu7xTGP/eCx+RrOQON1Bschv8Mjxk56tYg==", + "version": "0.19.3", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.19.3.tgz", + "integrity": "sha512-rpxKcmKPhyGfZo1w9h3+E5FY+LXOn8o5fJxpJd2MbLF8segvvWLtJeXL46Q2IkEFqR4uxf00NUTbCwXjRIVaQQ==", "dependencies": { "@graphiql/toolkit": "^0.9.1", "@headlessui/react": "^1.7.15", @@ -238,9 +583,9 @@ } }, "node_modules/@headlessui/react": { - "version": "1.7.15", - "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.15.tgz", - "integrity": "sha512-OTO0XtoRQ6JPB1cKNFYBZv2Q0JMqMGNhYP1CjPvcJvjz8YGokz8oAj89HIYZGN0gZzn/4kk9iUpmMF4Q21Gsqw==", + "version": "1.7.16", + "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.16.tgz", + "integrity": "sha512-2MphIAZdSUacZBT6EXk8AJkj+EuvaaJbtCyHTJrPsz8inhzCl7qeNPI1uk1AUvCgWylVtdN8cVVmnhUDPxPy3g==", "dependencies": { "client-only": "^0.0.1" }, @@ -253,9 +598,9 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", - "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", + "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", "dev": true, "dependencies": { "@humanwhocodes/object-schema": "^1.2.1", @@ -1108,20 +1453,20 @@ } }, "node_modules/@tanstack/query-core": { - "version": "4.32.0", - "resolved": 
"https://registry.npmjs.org/@tanstack/query-core/-/query-core-4.32.0.tgz", - "integrity": "sha512-ei4IYwL2kmlKSlCw9WgvV7PpXi0MiswVwfQRxawhJA690zWO3dU49igaQ/UMTl+Jy9jj9dK5IKAYvbX7kUvviQ==", + "version": "4.35.3", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-4.35.3.tgz", + "integrity": "sha512-PS+WEjd9wzKTyNjjQymvcOe1yg8f3wYc6mD+vb6CKyZAKvu4sIJwryfqfBULITKCla7P9C4l5e9RXePHvZOZeQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" } }, "node_modules/@tanstack/react-query": { - "version": "4.32.0", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-4.32.0.tgz", - "integrity": "sha512-B8WUMcByYAH9500ENejDCATOmEZhqjtS9wsfiQ3BNa+s+yAynY8SESI8WWHhSqUmjd0pmCSFRP6BOUGSda3QXA==", + "version": "4.35.3", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-4.35.3.tgz", + "integrity": "sha512-UgTPioip/rGG3EQilXfA2j4BJkhEQsR+KAbF+KIuvQ7j4MkgnTCJF01SfRpIRNtQTlEfz/+IL7+jP8WA8bFbsw==", "dependencies": { - "@tanstack/query-core": "4.32.0", + "@tanstack/query-core": "4.35.3", "use-sync-external-store": "^1.2.0" }, "funding": { @@ -1168,9 +1513,9 @@ "devOptional": true }, "node_modules/@types/react": { - "version": "18.2.15", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.15.tgz", - "integrity": "sha512-oEjE7TQt1fFTFSbf8kkNuc798ahTUzn3Le67/PWjE8MAfYAD/qB7O8hSTcromLFqHCt9bcdOg5GXMokzTjJ5SA==", + "version": "18.2.21", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.21.tgz", + "integrity": "sha512-neFKG/sBAwGxHgXiIxnbm3/AAVQ/cMRS93hvBpg8xYRbeQSPVABp9U2bRnPf0iI4+Ucdv3plSxKK+3CW2ENJxA==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -1194,9 +1539,9 @@ "devOptional": true }, "node_modules/@types/semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", - "integrity": 
"sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz", + "integrity": "sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw==", "dev": true }, "node_modules/@types/tern": { @@ -1208,32 +1553,33 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.11.tgz", - "integrity": "sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.0.tgz", + "integrity": "sha512-gUqtknHm0TDs1LhY12K2NA3Rmlmp88jK9Tx8vGZMfHeNMLE3GH2e9TRub+y+SOjuYgtOmok+wt1AyDPZqxbNag==", "dev": true, "dependencies": { - "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.59.11", - "@typescript-eslint/type-utils": "5.59.11", - "@typescript-eslint/utils": "5.59.11", + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "6.7.0", + "@typescript-eslint/type-utils": "6.7.0", + "@typescript-eslint/utils": "6.7.0", + "@typescript-eslint/visitor-keys": "6.7.0", "debug": "^4.3.4", - "grapheme-splitter": "^1.0.4", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "graphemer": "^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^5.0.0", - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 
|| ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -1242,82 +1588,26 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz", - "integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.0.tgz", + "integrity": "sha512-jZKYwqNpNm5kzPVP5z1JXAuxjtl2uG+5NpaMocFPTNC2EdYIgbXIPImObOkhbONxtFTTdoZstLZefbaK+wXZng==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", + "@typescript-eslint/scope-manager": "6.7.0", + "@typescript-eslint/types": "6.7.0", + "@typescript-eslint/typescript-estree": "6.7.0", + "@typescript-eslint/visitor-keys": "6.7.0", "debug": "^4.3.4" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz", - "integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - 
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz", - "integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz", - "integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "eslint": "^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -1325,34 +1615,17 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz", - "integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.62.0", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, 
"node_modules/@typescript-eslint/scope-manager": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz", - "integrity": "sha512-dHFOsxoLFtrIcSj5h0QoBT/89hxQONwmn3FOQ0GOQcLOOXm+MIrS8zEAhs4tWl5MraxCY3ZJpaXQQdFMc2Tu+Q==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.0.tgz", + "integrity": "sha512-lAT1Uau20lQyjoLUQ5FUMSX/dS07qux9rYd5FGzKz/Kf8W8ccuvMyldb8hadHdK/qOI7aikvQWqulnEq2nCEYA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.11", - "@typescript-eslint/visitor-keys": "5.59.11" + "@typescript-eslint/types": "6.7.0", + "@typescript-eslint/visitor-keys": "6.7.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -1360,25 +1633,25 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.11.tgz", - "integrity": "sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.0.tgz", + "integrity": "sha512-f/QabJgDAlpSz3qduCyQT0Fw7hHpmhOzY/Rv6zO3yO+HVIdPfIWhrQoAyG+uZVtWAIS85zAyzgAFfyEr+MgBpg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "5.59.11", - "@typescript-eslint/utils": "5.59.11", + "@typescript-eslint/typescript-estree": "6.7.0", + "@typescript-eslint/utils": "6.7.0", "debug": "^4.3.4", - "tsutils": "^3.21.0" + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "*" + "eslint": "^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { 
"typescript": { @@ -1387,12 +1660,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.11.tgz", - "integrity": "sha512-epoN6R6tkvBYSc+cllrz+c2sOFWkbisJZWkOE+y3xHtvYaOE6Wk6B8e114McRJwFRjGvYdJwLXQH5c9osME/AA==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.0.tgz", + "integrity": "sha512-ihPfvOp7pOcN/ysoj0RpBPOx3HQTJTrIN8UZK+WFd3/iDeFHHqeyYxa4hQk4rMhsz9H9mXpR61IzwlBVGXtl9Q==", "dev": true, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -1400,21 +1673,21 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz", - "integrity": "sha512-YupOpot5hJO0maupJXixi6l5ETdrITxeo5eBOeuV7RSKgYdU3G5cxO49/9WRnJq9EMrB7AuTSLH/bqOsXi7wPA==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.0.tgz", + "integrity": "sha512-dPvkXj3n6e9yd/0LfojNU8VMUGHWiLuBZvbM6V6QYD+2qxqInE7J+J/ieY2iGwR9ivf/R/haWGkIj04WVUeiSQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.11", - "@typescript-eslint/visitor-keys": "5.59.11", + "@typescript-eslint/types": "6.7.0", + "@typescript-eslint/visitor-keys": "6.7.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -1427,42 +1700,41 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.11.tgz", - "integrity": 
"sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.0.tgz", + "integrity": "sha512-MfCq3cM0vh2slSikQYqK2Gq52gvOhe57vD2RM3V4gQRZYX4rDPnKLu5p6cm89+LJiGlwEXU8hkYxhqqEC/V3qA==", "dev": true, "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.59.11", - "@typescript-eslint/types": "5.59.11", - "@typescript-eslint/typescript-estree": "5.59.11", - "eslint-scope": "^5.1.1", - "semver": "^7.3.7" + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.7.0", + "@typescript-eslint/types": "6.7.0", + "@typescript-eslint/typescript-estree": "6.7.0", + "semver": "^7.5.4" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^7.0.0 || ^8.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.59.11", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz", - "integrity": "sha512-KGYniTGG3AMTuKF9QBD7EIrvufkB6O6uX3knP73xbKLMpH+QRPcgnCxjWXSHjMRuOxFLovljqQgQpR0c7GvjoA==", + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.0.tgz", + "integrity": "sha512-/C1RVgKFDmGMcVGeD8HjKv2bd72oI1KxQDeY8uc66gw9R0OK0eMq48cA+jv9/2Ag6cdrsUGySm1yzYmfz0hxwQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.11", - "eslint-visitor-keys": "^3.3.0" + "@typescript-eslint/types": "6.7.0", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": 
"^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -1769,9 +2041,9 @@ } }, "node_modules/esbuild": { - "version": "0.17.19", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.19.tgz", - "integrity": "sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==", + "version": "0.18.17", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.17.tgz", + "integrity": "sha512-1GJtYnUxsJreHYA0Y+iQz2UEykonY66HNWOb0yXYZi9/kNrORUEHVg87eQsCtqh59PEJ5YVZJO98JHznMJSWjg==", "dev": true, "hasInstallScript": true, "bin": { @@ -1781,28 +2053,28 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.17.19", - "@esbuild/android-arm64": "0.17.19", - "@esbuild/android-x64": "0.17.19", - "@esbuild/darwin-arm64": "0.17.19", - "@esbuild/darwin-x64": "0.17.19", - "@esbuild/freebsd-arm64": "0.17.19", - "@esbuild/freebsd-x64": "0.17.19", - "@esbuild/linux-arm": "0.17.19", - "@esbuild/linux-arm64": "0.17.19", - "@esbuild/linux-ia32": "0.17.19", - "@esbuild/linux-loong64": "0.17.19", - "@esbuild/linux-mips64el": "0.17.19", - "@esbuild/linux-ppc64": "0.17.19", - "@esbuild/linux-riscv64": "0.17.19", - "@esbuild/linux-s390x": "0.17.19", - "@esbuild/linux-x64": "0.17.19", - "@esbuild/netbsd-x64": "0.17.19", - "@esbuild/openbsd-x64": "0.17.19", - "@esbuild/sunos-x64": "0.17.19", - "@esbuild/win32-arm64": "0.17.19", - "@esbuild/win32-ia32": "0.17.19", - "@esbuild/win32-x64": "0.17.19" + "@esbuild/android-arm": "0.18.17", + "@esbuild/android-arm64": "0.18.17", + "@esbuild/android-x64": "0.18.17", + "@esbuild/darwin-arm64": "0.18.17", + "@esbuild/darwin-x64": "0.18.17", + "@esbuild/freebsd-arm64": "0.18.17", + "@esbuild/freebsd-x64": "0.18.17", + "@esbuild/linux-arm": "0.18.17", + "@esbuild/linux-arm64": "0.18.17", + "@esbuild/linux-ia32": "0.18.17", + "@esbuild/linux-loong64": "0.18.17", + "@esbuild/linux-mips64el": "0.18.17", + "@esbuild/linux-ppc64": "0.18.17", + "@esbuild/linux-riscv64": "0.18.17", + 
"@esbuild/linux-s390x": "0.18.17", + "@esbuild/linux-x64": "0.18.17", + "@esbuild/netbsd-x64": "0.18.17", + "@esbuild/openbsd-x64": "0.18.17", + "@esbuild/sunos-x64": "0.18.17", + "@esbuild/win32-arm64": "0.18.17", + "@esbuild/win32-ia32": "0.18.17", + "@esbuild/win32-x64": "0.18.17" } }, "node_modules/escape-string-regexp": { @@ -1818,27 +2090,27 @@ } }, "node_modules/eslint": { - "version": "8.45.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.45.0.tgz", - "integrity": "sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.49.0.tgz", + "integrity": "sha512-jw03ENfm6VJI0jA9U+8H5zfl5b+FvuU3YYvZRdZHOlU2ggJkxrlkJH4HcDrZpj6YwD8kuYqvQM8LyesoazrSOQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.1.0", - "@eslint/js": "8.44.0", - "@humanwhocodes/config-array": "^0.11.10", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.2", + "@eslint/js": "8.49.0", + "@humanwhocodes/config-array": "^0.11.11", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.6.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -1884,31 +2156,18 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.3.5.tgz", - "integrity": "sha512-61qNIsc7fo9Pp/mju0J83kzvLm0Bsayu7OQSLEoJxLDCBjIIyb87bkzufoOvdDxLkSlMfkF7UxomC4+eztUBSA==", + "version": "0.4.3", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.3.tgz", + "integrity": "sha512-Hh0wv8bUNY877+sI0BlCUlsS0TYYQqvzEwJsJJPM2WF4RnTStSnSR3zdJYa2nPOJgg3UghXi54lVyMSmpCalzA==", "dev": true, "peerDependencies": { "eslint": ">=7" } }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/eslint-visitor-keys": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", - "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1918,9 +2177,9 @@ } }, "node_modules/eslint/node_modules/eslint-scope": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", - "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, "dependencies": { "esrecurse": "^4.3.0", @@ -2001,15 +2260,6 @@ "node": ">=4.0" } }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": 
"sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -2237,9 +2487,9 @@ } }, "node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "version": "13.21.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", + "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -2271,12 +2521,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/grapheme-splitter": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", - "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", - "dev": true - }, "node_modules/graphemer": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", @@ -2284,11 +2528,11 @@ "dev": true }, "node_modules/graphiql": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.0.4.tgz", - "integrity": "sha512-5NVEG1I8CkpHtZEauvHnU4yoVPjktTHiSMsxXCMwEB6OMkvSg71Fix1MtTc1k/8HnJUTomIDLodRAiRM3Hu+dQ==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.0.5.tgz", + "integrity": "sha512-R02CKVXPajOmJcg0TAKuRMU8qvwb7ltGDYqbaQMKbLeYYw/wQUrmTxLwdVuRadgRL4ubNzl3q5vKTkQKR5Ay2Q==", "dependencies": { - "@graphiql/react": "^0.19.2", + "@graphiql/react": "^0.19.3", "@graphiql/toolkit": "^0.9.1", "graphql-language-service": "^5.1.7", "markdown-it": "^12.2.0" @@ -2300,9 +2544,9 @@ } }, 
"node_modules/graphql": { - "version": "16.7.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.7.1.tgz", - "integrity": "sha512-DRYR9tf+UGU0KOsMcKAlXeFfX89UiiIZ0dRU3mR0yJfu6OjZqUcp68NnFLnqQU5RexygFoDy1EW+ccOYcPfmHg==", + "version": "16.8.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.0.tgz", + "integrity": "sha512-0oKGaR+y3qcS5mCu1vb7KG+a89vjn06C7Ihq/dDl3jA+A8B3TKomvi3CiEcVLJQGalbu8F52LxkOym7U5sSfbg==", "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" } @@ -2660,12 +2904,6 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, - "node_modules/natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, "node_modules/nullthrows": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", @@ -2810,9 +3048,9 @@ "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==" }, "node_modules/postcss": { - "version": "8.4.24", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.24.tgz", - "integrity": "sha512-M0RzbcI0sO/XJNucsGjvWU9ERWxb/ytp1w6dKtxTKgixdtQDq4rmx/g8W1hnaheq9jgwL/oyEdH5Bc4WwJKMqg==", + "version": "8.4.27", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.27.tgz", + "integrity": "sha512-gY/ACJtJPSmUFPDCHtX78+01fHa64FaU4zaaWfuh1MhGJISufJAH4cun6k/8fwsHYeK4UQmENQK+tRLCFJE8JQ==", "dev": true, "funding": [ { @@ -2899,9 +3137,9 @@ } }, "node_modules/react-hook-form": { - "version": "7.45.2", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.45.2.tgz", - "integrity": "sha512-9s45OdTaKN+4NSTbXVqeDITd/nwIg++nxJGL8+OD5uf1DxvhsXQ641kaYHk5K28cpIOTYm71O/fYk7rFaygb3A==", + "version": 
"7.46.1", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.46.1.tgz", + "integrity": "sha512-0GfI31LRTBd5tqbXMGXT1Rdsv3rnvy0FjEk8Gn9/4tp6+s77T7DPZuGEpBRXOauL+NhyGT5iaXzdIM2R6F/E+w==", "engines": { "node": ">=12.22.0" }, @@ -3035,9 +3273,9 @@ } }, "node_modules/rollup": { - "version": "3.25.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.25.1.tgz", - "integrity": "sha512-tywOR+rwIt5m2ZAWSe5AIJcTat8vGlnPFAv15ycCrw33t6iFsXZ6mzHVFh2psSjxQPmI+xgzMZZizUAukBI4aQ==", + "version": "3.27.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.27.2.tgz", + "integrity": "sha512-YGwmHf7h2oUHkVBT248x0yt6vZkYQ3/rvE5iQuVBh3WO8GcJ6BNeOkpoX1yMHIiBm18EMLjBPIoUDkhgnyxGOQ==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -3082,9 +3320,9 @@ } }, "node_modules/semver": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", - "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" @@ -3231,25 +3469,16 @@ "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz", "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" }, - "node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", 
+ "node_modules/ts-api-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.1.tgz", + "integrity": "sha512-lC/RGlPmwdrIBFTX59wwNzqh7aR2otPNPR/5brHZm/XKFYKsfqxihXUe9pU3JI+3vGkl+vyCoNNnPhJn3aLK1A==", "dev": true, - "dependencies": { - "tslib": "^1.8.1" - }, "engines": { - "node": ">= 6" + "node": ">=16.13.0" }, "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + "typescript": ">=4.2.0" } }, "node_modules/type-check": { @@ -3277,9 +3506,9 @@ } }, "node_modules/typescript": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz", - "integrity": "sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -3363,14 +3592,14 @@ } }, "node_modules/vite": { - "version": "4.3.9", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.3.9.tgz", - "integrity": "sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==", + "version": "4.4.9", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.4.9.tgz", + "integrity": "sha512-2mbUn2LlUmNASWwSCNSJ/EG2HuSRTnVNaydp6vMCm5VIqJsjMfbIWtbH2kDuwUVW5mMUKKZvGPX/rqeqVvv1XA==", "dev": true, "dependencies": { - "esbuild": "^0.17.5", - "postcss": "^8.4.23", - "rollup": "^3.21.0" + "esbuild": "^0.18.10", + "postcss": "^8.4.27", + "rollup": "^3.27.1" }, "bin": { "vite": "bin/vite.js" @@ -3378,12 +3607,16 @@ "engines": { "node": "^14.18.0 || >=16.0.0" }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, "optionalDependencies": { "fsevents": "~2.3.2" }, 
"peerDependencies": { "@types/node": ">= 14", "less": "*", + "lightningcss": "^1.21.0", "sass": "*", "stylus": "*", "sugarss": "*", @@ -3396,6 +3629,9 @@ "less": { "optional": true }, + "lightningcss": { + "optional": true + }, "sass": { "optional": true }, diff --git a/playground/package.json b/playground/package.json index 184ec188a0..d211df0704 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,24 +10,24 @@ "preview": "vite preview" }, "dependencies": { - "@tanstack/react-query": "^4.32.0", + "@tanstack/react-query": "^4.35.3", "fast-json-patch": "^3.1.1", - "graphiql": "^3.0.4", - "graphql": "^16.7.1", + "graphiql": "^3.0.5", + "graphql": "^16.8.0", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-hook-form": "^7.45.2" + "react-hook-form": "^7.46.1" }, "devDependencies": { - "@types/react": "^18.2.15", + "@types/react": "^18.2.21", "@types/react-dom": "^18.2.7", - "@typescript-eslint/eslint-plugin": "^5.59.0", - "@typescript-eslint/parser": "^5.62.0", + "@typescript-eslint/eslint-plugin": "^6.7.0", + "@typescript-eslint/parser": "^6.7.0", "@vitejs/plugin-react-swc": "^3.0.0", - "eslint": "^8.45.0", + "eslint": "^8.49.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.3.4", - "typescript": "^5.0.2", - "vite": "^4.3.9" + "eslint-plugin-react-refresh": "^0.4.3", + "typescript": "^5.2.2", + "vite": "^4.4.9" } } diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index d48c7bb638..00287c4454 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -443,7 +443,7 @@ func finalizeRelations(relationManager *RelationManager, descriptions []client.C // if not finalized then we are missing one side of the relationship if !rel.finalized { - return NewErrRelationOneSided(field.Schema) + return client.NewErrRelationOneSided(field.Name, field.Schema) } field.RelationType = rel.Kind() | fieldRelationType diff --git 
a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index dd0e3baa63..cf28c7d710 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -26,7 +26,6 @@ const ( errIndexUnknownArgument string = "index with unknown argument" errIndexInvalidArgument string = "index with invalid argument" errIndexInvalidName string = "index with invalid name" - errRelationOneSided string = "relation must be defined on both schemas" ) var ( @@ -49,7 +48,6 @@ var ( ErrIndexMissingFields = errors.New(errIndexMissingFields) ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) - ErrRelationOneSided = errors.New(errRelationOneSided) ) func NewErrDuplicateField(objectName, fieldName string) error { @@ -81,10 +79,6 @@ func NewErrRelationMissingField(objectName, fieldName string) error { ) } -func NewErrRelationOneSided(typeName string) error { - return errors.New(errRelationOneSided, errors.NewKV("Type", typeName)) -} - func NewErrAggregateTargetNotFound(objectName, target string) error { return errors.New( errAggregateTargetNotFound, diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index 712423158c..4ffe998d88 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -18,7 +18,7 @@ import ( "sync" "testing" - "github.com/dgraph-io/badger/v3" + "github.com/dgraph-io/badger/v4" ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/defradb/client" diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index 148347aa2f..2f70245b23 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -133,3 +133,5 @@ func (*dummyTxn) Commit(ctx context.Context) error { return nil } func (*dummyTxn) Discard(ctx context.Context) {} func (*dummyTxn) OnSuccess(fn func()) {} func (*dummyTxn) OnError(fn func()) {} +func (*dummyTxn) OnDiscard(fn func()) {} +func (*dummyTxn) ID() uint64 { return 0 
} diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index f6d6123d52..48700907a5 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -65,41 +65,6 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. executeTestCase(t, test) } -// note: This test should fail at the second book creation since the relationship is 1-to-1 and this -// effectively creates a 1-to-many relationship -func TestBackupExport_AllCollectionsMultipleDocsAndMultipleDocUpdate_NoError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "John", "age": 30}`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "Bob", "age": 31}`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{"age": 31}`, - }, - testUtils.BackupExport{ - ExpectedContent: `{"Book":[{"_key":"bae-4399f189-138d-5d49-9e25-82e78463677b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, - }, - }, - } - - executeTestCase(t, test) -} - func 
TestBackupExport_DoubleReletionship_NoError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ @@ -191,53 +156,3 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } - -// note: This test should fail at the second book creation since the relationship is 1-to-1 and this -// effectively creates a 1-to-many relationship -func TestBackupExport_DoubleReletionshipWithUpdateAndDoublylinked_NoError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - book: Book @relation(name: "written_books") - favouriteBook: Book @relation(name: "favourite_books") - } - type Book { - name: String - author: User @relation(name: "written_books") - favourite: User @relation(name: "favourite_books") - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "John", "age": 30}`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "Bob", "age": 31}`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519", "favourite": "bae-0648f44e-74e8-593b-a662-3310ec278927"}`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{"name": "Game of chains"}`, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{"age": 31, "book_id": "bae-da7f2d88-05c4-528a-846a-0d18ab26603b"}`, - }, - testUtils.BackupExport{ - ExpectedContent: `{"Book":[{"_key":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_newKey":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"},{"_key":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of 
chains"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 9abe59f06a..85c63f9e99 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -150,9 +150,6 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr executeTestCase(t, test) } -// note: This test should fail at the second book creation since the relationship is 1-to-1 and this -// effectively creates a 1-to-many relationship: -// https://github.com/sourcenetwork/defradb/issues/1646 func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndMultipleUpdatedDocs_NoError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ @@ -187,50 +184,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndMultipleUpdatedD } ] }`, - }, - testUtils.Request{ - Request: ` - query { - User { - name - age - } - }`, - Results: []map[string]any{ - { - "name": "Bob", - "age": uint64(31), - }, - { - "name": "John", - "age": uint64(31), - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - _key - } - } - }`, - Results: []map[string]any{ - { - "name": "Game of chains", - "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", - }, - }, - { - "name": "John and the sourcerers' stone", - "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", - }, - }, - }, + ExpectedError: "target document is already linked to another document.", }, }, } diff --git a/tests/integration/backup/self_reference/export_test.go 
b/tests/integration/backup/self_reference/export_test.go index 21eb5e95d3..e0d0c606cf 100644 --- a/tests/integration/backup/self_reference/export_test.go +++ b/tests/integration/backup/self_reference/export_test.go @@ -64,58 +64,3 @@ func TestBackupExport_MultipleDocsAndDocUpdate_NoError(t *testing.T) { executeTestCase(t, test) } - -func TestBackupExport_MultipleDocsAndDocUpdateWithSelfReference_NoError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "John", "age": 30}`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "Bob", "age": 31, "boss": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{"boss_id": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","age":31,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"John"}]}`, - }, - }, - } - - executeTestCase(t, test) -} - -func TestBackupExport_MultipleDocsAndMultipleDocUpdateWithSelfReference_NoError(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "John", "age": 30}`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{"name": "Bob", "age": 31, "boss": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{"boss_id": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{"age": 31}`, - }, - testUtils.BackupExport{ - ExpectedContent: 
`{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-067fd15e-32a1-5681-8f41-c423f563e21b","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John"}]}`, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/cli/client_ping_test.go b/tests/integration/cli/client_ping_test.go index 1a77c218ca..a4e1eef96f 100644 --- a/tests/integration/cli/client_ping_test.go +++ b/tests/integration/cli/client_ping_test.go @@ -55,7 +55,7 @@ func TestPingCommandToInvalidHost(t *testing.T) { func TestPingCommandNoHost(t *testing.T) { conf := NewDefraNodeDefaultConfig(t) - p, err := findFreePortInRange(49152, 65535) + p, err := findFreePortInRange(t, 49152, 65535) assert.NoError(t, err) addr := fmt.Sprintf("localhost:%d", p) _, stderr := runDefraCommand(t, conf, []string{"client", "ping", "--url", addr}) diff --git a/tests/integration/cli/client_rpc_replicator_test.go b/tests/integration/cli/client_rpc_replicator_test.go index 509762121d..1fd0e3c351 100644 --- a/tests/integration/cli/client_rpc_replicator_test.go +++ b/tests/integration/cli/client_rpc_replicator_test.go @@ -17,7 +17,7 @@ import ( func TestReplicatorGetAllEmpty(t *testing.T) { conf := NewDefraNodeDefaultConfig(t) - portTCP, err := findFreePortInRange(49152, 65535) + portTCP, err := findFreePortInRange(t, 49152, 65535) if err != nil { t.Fatal(err) } diff --git a/tests/integration/cli/start_test.go b/tests/integration/cli/start_test.go index a49bba9c0d..1a6267f190 100644 --- a/tests/integration/cli/start_test.go +++ b/tests/integration/cli/start_test.go @@ -52,7 +52,7 @@ func TestStartCommandWithStoreMemory(t *testing.T) { func TestStartCommandWithP2PAddr(t *testing.T) { conf := NewDefraNodeDefaultConfig(t) - p2pport, err := findFreePortInRange(49152, 65535) + p2pport, err := 
findFreePortInRange(t, 49152, 65535) if err != nil { t.Fatal(err) } diff --git a/tests/integration/cli/utils.go b/tests/integration/cli/utils.go index a5999231a4..c94ce222dc 100644 --- a/tests/integration/cli/utils.go +++ b/tests/integration/cli/utils.go @@ -25,6 +25,7 @@ import ( "os" "path/filepath" "strings" + "sync" "testing" "time" @@ -46,11 +47,11 @@ type DefraNodeConfig struct { func NewDefraNodeDefaultConfig(t *testing.T) DefraNodeConfig { t.Helper() - portAPI, err := findFreePortInRange(49152, 65535) + portAPI, err := findFreePortInRange(t, 49152, 65535) if err != nil { t.Fatal(err) } - portGRPC, err := findFreePortInRange(49152, 65535) + portGRPC, err := findFreePortInRange(t, 49152, 65535) if err != nil { t.Fatal(err) } @@ -196,9 +197,12 @@ func captureOutput(f func()) (stdout, stderr []string) { return } +var portsInUse = make(map[int]struct{}) +var portMutex = sync.Mutex{} + // findFreePortInRange returns a free port in the range [minPort, maxPort]. // The range of ports that are unfrequently used is [49152, 65535]. 
-func findFreePortInRange(minPort, maxPort int) (int, error) { +func findFreePortInRange(t *testing.T, minPort, maxPort int) (int, error) { if minPort < 1 || maxPort > 65535 || minPort > maxPort { return 0, errors.New("invalid port range") } @@ -206,9 +210,20 @@ func findFreePortInRange(minPort, maxPort int) (int, error) { const maxAttempts = 100 for i := 0; i < maxAttempts; i++ { port := rand.Intn(maxPort-minPort+1) + minPort + if _, ok := portsInUse[port]; ok { + continue + } addr := fmt.Sprintf("127.0.0.1:%d", port) listener, err := net.Listen("tcp", addr) if err == nil { + portMutex.Lock() + portsInUse[port] = struct{}{} + portMutex.Unlock() + t.Cleanup(func() { + portMutex.Lock() + delete(portsInUse, port) + portMutex.Unlock() + }) _ = listener.Close() return port, nil } diff --git a/tests/integration/collection/create/one_to_many/simple_test.go b/tests/integration/collection/create/one_to_many/simple_test.go deleted file mode 100644 index 91f12b337e..0000000000 --- a/tests/integration/collection/create/one_to_many/simple_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestCreateSaveGivenAliasValueInRelationField(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "Painted House", - "author": "ValueDoesntMatter" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - CollectionCalls: map[string][]func(client.Collection) error{ - "Book": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/collection/create/one_to_many/utils.go b/tests/integration/collection/create/one_to_many/utils.go deleted file mode 100644 index 3f1d0f3027..0000000000 --- a/tests/integration/collection/create/one_to_many/utils.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -var schema = (` - type Book { - name: String - rating: Float - author: Author - } - - type Author { - name: String - age: Int - verified: Boolean - published: [Book] - } -`) - -func executeTestCase(t *testing.T, test testUtils.TestCase) { - testUtils.ExecuteRequestTestCase(t, schema, test) -} diff --git a/tests/integration/collection/create/one_to_one/save_test.go b/tests/integration/collection/create/one_to_one/save_test.go deleted file mode 100644 index e3b4d0a3b8..0000000000 --- a/tests/integration/collection/create/one_to_one/save_test.go +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestCollectionCreateSaveErrorsNonExistantKeyViaSecondarySide(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "Painted House", - "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - CollectionCalls: map[string][]func(client.Collection) error{ - "Book": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - ExpectedError: "no document for the given key exists", - } - - executeTestCase(t, test) -} - -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. It is doubly odd -// given that saving from the secondary side errors as expected -func TestCollectionCreateSaveCreatesDoc(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "John", - "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - CollectionCalls: map[string][]func(client.Collection) error{ - "Author": []func(c client.Collection) error{ - func(c client.Collection) error { - err := c.Save(context.Background(), doc) - if err != nil { - return err - } - - d, err := c.Get(context.Background(), doc.Key(), false) - if err != nil { - return err - } - - name, err := d.Get("name") - if err != nil { - return err - } - - assert.Equal(t, "John", name) - - return nil - }, - }, - }, - } - - executeTestCase(t, test) -} - -func TestCollectionCreateSaveFromSecondarySide(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "Painted House", - "author_id": 
"bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - Docs: map[string][]string{ - "Author": { - `{ - "name": "John Grisham" - }`, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Book": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/collection/create/one_to_one/utils.go b/tests/integration/collection/create/one_to_one/utils.go deleted file mode 100644 index e1f4a8459e..0000000000 --- a/tests/integration/collection/create/one_to_one/utils.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -var schema = ` - type Book { - name: String - rating: Float - author: Author - } - - type Author { - name: String - age: Int - verified: Boolean - published: Book @primary - } -` - -func executeTestCase(t *testing.T, test testUtils.TestCase) { - testUtils.ExecuteRequestTestCase(t, schema, test) -} diff --git a/tests/integration/collection/update/one_to_one/save_test.go b/tests/integration/collection/update/one_to_one/save_test.go deleted file mode 100644 index d99e2275b3..0000000000 --- a/tests/integration/collection/update/one_to_one/save_test.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package update - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestUpdateOneToOneSaveErrorsGivenNonExistantKeyViaSecondarySide(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "Painted House" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - err = doc.SetWithJSON( - []byte( - `{ - "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - Docs: map[string][]string{ - "Book": { - `{ - "name": "Painted House" - }`, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Book": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - ExpectedError: "no document for the given key exists", - } - - executeTestCase(t, test) -} - -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. 
It is doubly odd -// given that saving from the secondary side errors as expected -func TestUpdateOneToOneSavesGivenNewRelationValue(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "John Grisham" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - err = doc.SetWithJSON( - []byte( - `{ - "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - Docs: map[string][]string{ - "Author": { - `{ - "name": "John Grisham" - }`, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Author": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - } - - executeTestCase(t, test) -} - -func TestUpdateOneToOneSaveFromSecondarySide(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "Painted House" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - err = doc.SetWithJSON( - []byte( - `{ - "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - Docs: map[string][]string{ - "Author": { - `{ - "name": "John Grisham" - }`, - }, - "Book": { - `{ - "name": "Painted House" - }`, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Book": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/collection/update/one_to_one/utils.go b/tests/integration/collection/update/one_to_one/utils.go deleted file mode 100644 index d7f2f42be3..0000000000 --- a/tests/integration/collection/update/one_to_one/utils.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source 
License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -var schema = ` - type Book { - name: String - rating: Float - author: Author - } - - type Author { - name: String - age: Int - verified: Boolean - published: Book @primary - } -` - -func executeTestCase(t *testing.T, test testUtils.TestCase) { - testUtils.ExecuteRequestTestCase(t, schema, test) -} diff --git a/tests/integration/collection/update/simple/save_test.go b/tests/integration/collection/update/simple/save_test.go deleted file mode 100644 index 1174af4028..0000000000 --- a/tests/integration/collection/update/simple/save_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package update - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" -) - -func TestUpdateSaveErrorsGivenUnknownField(t *testing.T) { - doc, err := client.NewDocFromJSON( - []byte( - `{ - "name": "John", - "age": 21 - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - err = doc.SetWithJSON( - []byte( - `{ - "FieldDoesNotExist": 21 - }`, - ), - ) - if err != nil { - assert.Fail(t, err.Error()) - } - - test := testUtils.TestCase{ - Docs: map[string][]string{ - "Users": { - `{ - "name": "John", - "age": 21 - }`, - }, - }, - CollectionCalls: map[string][]func(client.Collection) error{ - "Users": []func(c client.Collection) error{ - func(c client.Collection) error { - return c.Save(context.Background(), doc) - }, - }, - }, - ExpectedError: "The given field does not exist", - } - - executeTestCase(t, test) -} diff --git a/tests/integration/explain.go b/tests/integration/explain.go index e4221ea76b..44c457c0f8 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -11,7 +11,6 @@ package tests import ( - "context" "reflect" "sort" "testing" @@ -127,50 +126,43 @@ func executeExplainRequest( for _, node := range getNodes(action.NodeID, s.nodes) { result := node.DB.ExecRequest(s.ctx, action.Request) - assertExplainRequestResults( - s.ctx, - s.t, - s.testCase.Description, - &result.GQL, - action, - ) + assertExplainRequestResults(s, &result.GQL, action) } } func assertExplainRequestResults( - ctx context.Context, - t *testing.T, - description string, + s *state, actualResult *client.GQLResult, action ExplainRequest, ) { // Check expected error matches actual error. If it does we are done. 
if AssertErrors( - t, - description, + s.t, + s.testCase.Description, actualResult.Errors, action.ExpectedError, ) { return } else if action.ExpectedError != "" { // If didn't find a match but did expected an error, then fail. - assert.Fail(t, "Expected an error however none was raised.", description) + assert.Fail(s.t, "Expected an error however none was raised.", s.testCase.Description) } // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). resultantData := actualResult.Data.([]map[string]any) - log.Info(ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) + log.Info(s.ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). if lengthOfExpectedFullGraph := len(action.ExpectedFullGraph); action.ExpectedFullGraph != nil { - require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) + require.Equal(s.t, lengthOfExpectedFullGraph, len(resultantData), s.testCase.Description) for index, actualResult := range resultantData { if lengthOfExpectedFullGraph > index { - assert.Equal( - t, + assertResultsEqual( + s.t, + s.clientType, action.ExpectedFullGraph[index], actualResult, - description, + s.testCase.Description, ) } } @@ -179,15 +171,17 @@ func assertExplainRequestResults( // Ensure the complete high-level pattern matches, inother words check that all the // explain graph nodes are in the correct expected ordering. if action.ExpectedPatterns != nil { - require.Equal(t, len(action.ExpectedPatterns), len(resultantData), description) + require.Equal(s.t, len(action.ExpectedPatterns), len(resultantData), s.testCase.Description) + for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. 
- actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) - assert.Equal( - t, + actualResultWithoutAttributes := trimExplainAttributes(s.t, s.testCase.Description, actualResult) + assertResultsEqual( + s.t, + s.clientType, action.ExpectedPatterns[index], actualResultWithoutAttributes, - description, + s.testCase.Description, ) } } @@ -196,14 +190,13 @@ func assertExplainRequestResults( // Note: This does not check if the node is in correct location or not. if action.ExpectedTargets != nil { for _, target := range action.ExpectedTargets { - assertExplainTargetCase(t, description, target, resultantData) + assertExplainTargetCase(s, target, resultantData) } } } func assertExplainTargetCase( - t *testing.T, - description string, + s *state, targetCase PlanNodeTargetCase, actualResults []map[string]any, ) { @@ -217,17 +210,18 @@ func assertExplainTargetCase( if !isFound { assert.Fail( - t, + s.t, "Expected target ["+targetCase.TargetNodeName+"], was not found in the explain graph.", - description, + s.testCase.Description, ) } - assert.Equal( - t, + assertResultsEqual( + s.t, + s.clientType, targetCase.ExpectedAttributes, foundActualTarget, - description, + s.testCase.Description, ) } } @@ -312,24 +306,41 @@ func findTargetNode( } } + case []any: + return findTargetNodeFromArray(targetName, toSkip, includeChildNodes, r) + case []map[string]any: - for _, item := range r { - target, matches, found := findTargetNode( - targetName, - toSkip, - includeChildNodes, - item, - ) + return findTargetNodeFromArray(targetName, toSkip, includeChildNodes, r) + } - totalMatchedSoFar = totalMatchedSoFar + matches - toSkip -= matches + return nil, totalMatchedSoFar, false +} - if found { - if includeChildNodes { - return target, totalMatchedSoFar, true - } - return trimSubNodes(target), totalMatchedSoFar, true +// findTargetNodeFromArray is a helper that runs findTargetNode for each item in an array. 
+func findTargetNodeFromArray[T any]( + targetName string, + toSkip uint, + includeChildNodes bool, + actualResult []T, +) (any, uint, bool) { + var totalMatchedSoFar uint = 0 + + for _, item := range actualResult { + target, matches, found := findTargetNode( + targetName, + toSkip, + includeChildNodes, + item, + ) + + totalMatchedSoFar = totalMatchedSoFar + matches + toSkip -= matches + + if found { + if includeChildNodes { + return target, totalMatchedSoFar, true } + return trimSubNodes(target), totalMatchedSoFar, true } } @@ -358,9 +369,9 @@ func trimSubNodes(graph any) any { func trimExplainAttributes( t *testing.T, description string, - actualResult map[string]any, + actualResult any, ) map[string]any { - trimmedMap := copyMap(actualResult) + trimmedMap := copyMap(actualResult.(map[string]any)) for key, value := range trimmedMap { if !isPlanNode(key) { @@ -373,14 +384,10 @@ func trimExplainAttributes( trimmedMap[key] = trimExplainAttributes(t, description, v) case []map[string]any: - trimmedArrayElements := []map[string]any{} - for _, valueItem := range v { - trimmedArrayElements = append( - trimmedArrayElements, - trimExplainAttributes(t, description, valueItem), - ) - } - trimmedMap[key] = trimmedArrayElements + trimmedMap[key] = trimExplainAttributesArray(t, description, v) + + case []any: + trimmedMap[key] = trimExplainAttributesArray(t, description, v) default: assert.Fail( @@ -394,6 +401,22 @@ func trimExplainAttributes( return trimmedMap } +// trimExplainAttributesArray is a helper that runs trimExplainAttributes for each item in an array. 
+func trimExplainAttributesArray[T any]( + t *testing.T, + description string, + actualResult []T, +) []map[string]any { + trimmedArrayElements := []map[string]any{} + for _, valueItem := range actualResult { + trimmedArrayElements = append( + trimmedArrayElements, + trimExplainAttributes(t, description, valueItem), + ) + } + return trimmedArrayElements +} + // isPlanNode returns true if someName matches a plan node name, retruns false otherwise. func isPlanNode(someName string) bool { _, isPlanNode := allPlanNodeNames[someName] diff --git a/tests/integration/explain/default/with_average_join_test.go b/tests/integration/explain/default/with_average_join_test.go index a48a1b97d2..265ca932ce 100644 --- a/tests/integration/explain/default/with_average_join_test.go +++ b/tests/integration/explain/default/with_average_join_test.go @@ -347,3 +347,49 @@ func TestDefaultExplainRequestWithAverageOnMultipleJoinedFieldsWithFilter(t *tes explainUtils.ExecuteTestCase(t, test) } + +// This test asserts that only a single index join is used (not parallelNode) because the +// _avg reuses the rendered join as they have matching filters (average adds a ne nil filter). 
+func TestDefaultExplainRequestOneToManyWithAverageAndChildNeNilFilterSharesJoinField(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) 1-to-M relation request from many side with average filter shared.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + _avg(books: {field: rating}) + books(filter: {rating: {_ne: null}}){ + name + } + } + }`, + + ExpectedPatterns: []dataMap{ + { + "explain": dataMap{ + "selectTopNode": dataMap{ + "averageNode": dataMap{ + "countNode": dataMap{ + "sumNode": dataMap{ + "selectNode": dataMap{ + "typeIndexJoin": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/with_count_join_test.go b/tests/integration/explain/default/with_count_join_test.go index 6c116529a7..3f7802820d 100644 --- a/tests/integration/explain/default/with_count_join_test.go +++ b/tests/integration/explain/default/with_count_join_test.go @@ -260,3 +260,94 @@ func TestDefaultExplainRequestWithCountOnOneToManyJoinedFieldWithManySources(t * explainUtils.ExecuteTestCase(t, test) } + +// This test asserts that only a single index join is used (not parallelNode) because the +// _count reuses the rendered join as they have matching filters. 
+func TestDefaultExplainRequestOneToManyWithCountWithFilterAndChildFilterSharesJoinField(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) 1-to-M relation request from many side with count filter shared.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + _count(books: {filter: {rating: {_ne: null}}}) + books(filter: {rating: {_ne: null}}){ + name + } + } + }`, + + ExpectedPatterns: []dataMap{ + { + "explain": dataMap{ + "selectTopNode": dataMap{ + "countNode": dataMap{ + "selectNode": dataMap{ + "typeIndexJoin": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +// This test asserts that two joins are used (with parallelNode) because _count cannot +// reuse the rendered join as they dont have matching filters. +func TestDefaultExplainRequestOneToManyWithCountAndChildFilterDoesNotShareJoinField(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) 1-to-M relation request from many side with count filter not shared.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + _count(books: {}) + books(filter: {rating: {_ne: null}}){ + name + } + } + }`, + + ExpectedPatterns: []dataMap{ + { + "explain": dataMap{ + "selectTopNode": dataMap{ + "countNode": dataMap{ + "selectNode": dataMap{ + "parallelNode": []dataMap{ + { + "typeIndexJoin": normalTypeJoinPattern, + }, + { + "typeIndexJoin": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/fixture.go b/tests/integration/explain/fixture.go index 31b819e650..c531d95a84 100644 --- a/tests/integration/explain/fixture.go +++ b/tests/integration/explain/fixture.go @@ -27,6 +27,7 @@ var SchemaForExplainTests = 
testUtils.SchemaUpdate{ type Book { name: String author: Author + rating: Float pages: Int chapterPages: [Int!] } diff --git a/tests/integration/lens.go b/tests/integration/lens.go index dbdb4c1c70..317864ab3e 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -13,6 +13,7 @@ package tests import ( "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" ) @@ -72,10 +73,37 @@ func getMigrations( for _, node := range getNodes(action.NodeID, s.nodes) { db := getStore(s, node.DB, action.TransactionID, "") - configs := db.LensRegistry().Config() + configs, err := db.LensRegistry().Config(s.ctx) + require.NoError(s.t, err) + require.Equal(s.t, len(configs), len(action.ExpectedResults)) // The order of the results is not deterministic, so do not assert on the element - // locations. - assert.ElementsMatch(s.t, configs, action.ExpectedResults) + for _, expected := range action.ExpectedResults { + var actual client.LensConfig + var actualFound bool + + for _, config := range configs { + if config.SourceSchemaVersionID != expected.SourceSchemaVersionID { + continue + } + if config.DestinationSchemaVersionID != expected.DestinationSchemaVersionID { + continue + } + actual = config + actualFound = true + } + + require.True(s.t, actualFound, "matching lens config not found") + require.Equal(s.t, len(expected.Lenses), len(actual.Lenses)) + + for j, actualLens := range actual.Lenses { + expectedLens := expected.Lenses[j] + + assert.Equal(s.t, expectedLens.Inverse, actualLens.Inverse) + assert.Equal(s.t, expectedLens.Path, actualLens.Path) + + assertResultsEqual(s.t, s.clientType, expectedLens.Arguments, actualLens.Arguments) + } + } } } diff --git a/tests/integration/mutation/one_to_many/utils.go b/tests/integration/mutation/create/field_kinds/one_to_many/utils.go similarity index 93% rename from tests/integration/mutation/one_to_many/utils.go rename to 
tests/integration/mutation/create/field_kinds/one_to_many/utils.go index 2b81a7321e..21b9524567 100644 --- a/tests/integration/mutation/one_to_many/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/utils.go @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func ExecuteTestCase(t *testing.T, test testUtils.TestCase) { +func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ diff --git a/tests/integration/mutation/one_to_many/create/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go similarity index 59% rename from tests/integration/mutation/one_to_many/create/with_alias_test.go rename to tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index 990ae8f653..6fadbc5d85 100644 --- a/tests/integration/mutation/one_to_many/create/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -8,48 +8,46 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package create +package one_to_many import ( "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_many" ) func TestMutationCreateOneToMany_AliasedRelationNameWithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, with an invalid field, with alias.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"notName\": \"Painted House\",\"author\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + Doc: `{ + "notName": "Painted House", + "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "The given field does not exist. 
Name: notName", }, }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToMany_AliasedRelationNameNonExistingRelationSingleSide_NoIDFieldError(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the single side, no id relation field, with alias.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published\": \"bae--b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John Grisham", + "published": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "The given field does not exist. Name: published", }, }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } // Note: This test should probably not pass, as it contains a @@ -58,9 +56,16 @@ func TestMutationCreateOneToMany_AliasedRelationNameNonExistingRelationManySide_ test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the many side, with alias", Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { + Request: `query { + Book { name } }`, @@ -72,42 +77,34 @@ func TestMutationCreateOneToMany_AliasedRelationNameNonExistingRelationManySide_ }, }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } - -func TestMutationCreateOneToMany_AliasedRelationNamToLinkFromSingleSide_NoIDFieldError(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" - +func TestMutationCreateOneToMany_AliasedRelationNameInvalidIDManySide_CreatedDoc(t *testing.T) { test := testUtils.TestCase{ - Description: "One to many create mutation with relation id from single side, with alias.", + 
Description: "One to many create mutation, invalid id, from the many side, with alias", Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "author": "ValueDoesntMatter" + }`, + }, testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\"}") { - _key + Request: `query { + Book { + name } }`, Results: []map[string]any{ { - "_key": bookKey, + "name": "Painted House", }, }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published\": \"%s\"}") { - name - } - }`, - bookKey, - ), - ExpectedError: "The given field does not exist. Name: published", - }, }, } - - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testing.T) { @@ -116,32 +113,21 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin test := testUtils.TestCase{ Description: "One to many create mutation using relation id from many side, with alias.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" }`, authorKey, ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, }, testUtils.Request{ Request: `query { @@ -184,7 +170,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDocID(t 
*testing.T) { @@ -195,38 +181,37 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo nonAliasedTest := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, authorKey, ), + }, + testUtils.Request{ + Request: `query { + Book { + _key + } + }`, Results: []map[string]any{ { "_key": bookKey, // Must be same as below. - "name": "Painted House", }, }, }, }, } - fixture.ExecuteTestCase(t, nonAliasedTest) + executeTestCase(t, nonAliasedTest) // Check that `bookKey` is same in both above and the alised version below. // Note: Everything should be same, only diff should be the use of alias. 
@@ -234,36 +219,35 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo aliasedTest := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, authorKey, ), + }, + testUtils.Request{ + Request: `query { + Book { + _key + } + }`, Results: []map[string]any{ { - "_key": bookKey, // Must be same as below. - "name": "Painted House", + "_key": bookKey, // Must be same as above. }, }, }, }, } - fixture.ExecuteTestCase(t, aliasedTest) + executeTestCase(t, aliasedTest) } diff --git a/tests/integration/mutation/one_to_many/create/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go similarity index 56% rename from tests/integration/mutation/one_to_many/create/with_simple_test.go rename to tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go index a2c3ad3545..e72d7d218e 100644 --- a/tests/integration/mutation/one_to_many/create/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go @@ -8,48 +8,46 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package create +package one_to_many import ( "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_many" ) func TestMutationCreateOneToMany_WithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, with an invalid field.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"notName\": \"Painted House\",\"author_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + Doc: `{ + "notName": "Painted House", + "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "The given field does not exist. Name: notName", }, }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToMany_NonExistingRelationSingleSide_NoIDFieldError(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the single side, no id relation field.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"bae--b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John Grisham", + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "The given field does not exist. 
Name: published_id", }, }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } // Note: This test should probably not pass, as it contains a @@ -58,56 +56,28 @@ func TestMutationCreateOneToMany_NonExistingRelationManySide_CreatedDoc(t *testi test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the many side", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, }, - }, - } - fixture.ExecuteTestCase(t, test) -} - -func TestMutationCreateOneToMany_RelationIDToLinkFromSingleSide_NoIDFieldError(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" - - test := testUtils.TestCase{ - Description: "One to many create mutation with relation id from single side.", - Actions: []any{ testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\"}") { - _key + Request: `query { + Book { + name } }`, Results: []map[string]any{ { - "_key": bookKey, + "name": "Painted House", }, }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"%s\"}") { - name - } - }`, - bookKey, - ), - ExpectedError: "The given field does not exist. 
Name: published_id", - }, }, } - - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { @@ -116,32 +86,21 @@ func TestMutationCreateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation using relation id from many side", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" }`, authorKey, ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, }, testUtils.Request{ Request: `query { @@ -184,5 +143,5 @@ func TestMutationCreateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { }, } - fixture.ExecuteTestCase(t, test) + executeTestCase(t, test) } diff --git a/tests/integration/mutation/one_to_one/utils.go b/tests/integration/mutation/create/field_kinds/one_to_one/utils.go similarity index 93% rename from tests/integration/mutation/one_to_one/utils.go rename to tests/integration/mutation/create/field_kinds/one_to_one/utils.go index f960b3ba2c..4b5d33f618 100644 --- a/tests/integration/mutation/one_to_one/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/utils.go @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func ExecuteTestCase(t *testing.T, test testUtils.TestCase) { +func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ diff --git a/tests/integration/mutation/one_to_one/create/with_alias_test.go 
b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go similarity index 67% rename from tests/integration/mutation/one_to_one/create/with_alias_test.go rename to tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 9543795ead..1e491e3626 100644 --- a/tests/integration/mutation/one_to_one/create/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -8,31 +8,30 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package create +package one_to_one import ( "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_one" ) func TestMutationCreateOneToOne_UseAliasWithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, alias relation, with an invalid field.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"notName\": \"John Grisham\",\"published\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } - }`, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "notName": "John Grisham", + "published": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, ExpectedError: "The given field does not exist. 
Name: notName", }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } // Note: This test should probably not pass, as it contains a @@ -41,12 +40,19 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationPrimarySide_Creat test := testUtils.TestCase{ Description: "One to one create mutation, alias relation, from the wrong side", Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "published": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } - }`, + Request: `query { + Author { + name + } + }`, Results: []map[string]any{ { "name": "John Grisham", @@ -55,24 +61,24 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationPrimarySide_Creat }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, alias relation, from the secondary side", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "no document for the given key exists", }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySide(t *testing.T) { @@ -81,32 +87,21 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid test := testUtils.TestCase{ Description: "One to one create mutation with an alias relation.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - 
create_Book(data: "{\"name\": \"Painted House\"}") { - _key - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House" }`, - Results: []map[string]any{ - { - "_key": bookKey, - }, - }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 1, + Doc: fmt.Sprintf( + `{ + "name": "John Grisham", + "published": "%s" }`, bookKey, ), - Results: []map[string]any{ - { - "name": "John Grisham", - }, - }, }, testUtils.Request{ Request: `query { @@ -147,7 +142,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondarySide(t *testing.T) { @@ -156,32 +151,21 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondaryS test := testUtils.TestCase{ Description: "One to one create mutation from secondary side with alias relation.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" }`, authorKey, ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, }, testUtils.Request{ Request: `query { @@ -222,5 +206,5 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondaryS }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } diff --git a/tests/integration/mutation/one_to_one/create/with_simple_test.go 
b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go similarity index 54% rename from tests/integration/mutation/one_to_one/create/with_simple_test.go rename to tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index bc37265dd2..d22a77de6c 100644 --- a/tests/integration/mutation/one_to_one/create/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -8,31 +8,30 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package create +package one_to_one import ( "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_one" ) func TestMutationCreateOneToOne_WithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, with an invalid field.", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"notName\": \"John Grisham\",\"published_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } - }`, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "notName": "John Grisham", + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, ExpectedError: "The given field does not exist. 
Name: notName", }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } // Note: This test should probably not pass, as it contains a @@ -41,12 +40,19 @@ func TestMutationCreateOneToOneNoChild(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, from the wrong side", Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } - }`, + Request: `query { + Author { + name + } + }`, Results: []map[string]any{ { "name": "John Grisham", @@ -55,24 +61,24 @@ func TestMutationCreateOneToOneNoChild(t *testing.T) { }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, from the secondary side", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, ExpectedError: "no document for the given key exists", }, }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOne(t *testing.T) { @@ -81,32 +87,21 @@ func TestMutationCreateOneToOne(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Book(data: "{\"name\": \"Painted House\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": bookKey, - }, - }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + 
"name": "Painted House" + }`, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 1, + Doc: fmt.Sprintf( + `{ + "name": "John Grisham", + "published_id": "%s" }`, bookKey, ), - Results: []map[string]any{ - { - "name": "John Grisham", - }, - }, }, testUtils.Request{ Request: ` @@ -149,7 +144,7 @@ func TestMutationCreateOneToOne(t *testing.T) { }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) } func TestMutationCreateOneToOneSecondarySide(t *testing.T) { @@ -158,32 +153,21 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation from secondary side", Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": authorKey, - }, - }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - name - } + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" }`, authorKey, ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, }, testUtils.Request{ Request: ` @@ -226,5 +210,79 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { }, } - simpleTests.ExecuteTestCase(t, test) + executeTestCase(t, test) +} + +func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary(t *testing.T) { + bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + + test := testUtils.TestCase{ + Description: "One to one create mutation, errors due to link already existing, primary side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House" + }`, 
+ }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: fmt.Sprintf(`{ + "name": "John Grisham", + "published_id": "%s" + }`, + bookKey, + ), + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: fmt.Sprintf(`{ + "name": "Saadi Shirazi", + "published_id": "%s" + }`, + bookKey, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondary(t *testing.T) { + authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + + test := testUtils.TestCase{ + Description: "One to one create mutation, errors due to link already existing, secondary side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Painted House", + "author_id": "%s" + }`, + authorKey, + ), + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Golestan", + "author_id": "%s" + }`, + authorKey, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go new file mode 100644 index 0000000000..9fce31fdb2 --- /dev/null +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/utils.go @@ -0,0 +1,54 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_one_to_one + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func execute(t *testing.T, test testUtils.TestCase) { + testUtils.ExecuteTestCase( + t, + testUtils.TestCase{ + Description: test.Description, + Actions: append( + []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + rating: Float + author: Author + publisher: Publisher + } + + type Author { + name: String + age: Int + verified: Boolean + wrote: Book @primary + } + + type Publisher { + name: String + address: String + published: Book + } + `, + }, + }, + test.Actions..., + ), + }, + ) +} diff --git a/tests/integration/mutation/relation/create/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go similarity index 97% rename from tests/integration/mutation/relation/create/with_txn_test.go rename to tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index d48df2dfd4..0cc3807ddc 100644 --- a/tests/integration/mutation/relation/create/with_txn_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package relation_create +package one_to_one_to_one import ( "testing" @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable" testUtils "github.com/sourcenetwork/defradb/tests/integration" - relationTests "github.com/sourcenetwork/defradb/tests/integration/mutation/relation" ) func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing.T) { @@ -168,7 +167,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. 
}, } - relationTests.Execute(t, test) + execute(t, test) } func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing.T) { @@ -308,5 +307,5 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }, } - relationTests.Execute(t, test) + execute(t, test) } diff --git a/tests/integration/mutation/create/simple_test.go b/tests/integration/mutation/create/simple_test.go new file mode 100644 index 0000000000..e1f4aa6d01 --- /dev/null +++ b/tests/integration/mutation/create/simple_test.go @@ -0,0 +1,158 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationCreate_GivenNonExistantField_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation with non existant field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "fieldDoesNotExist": 27 + }`, + ExpectedError: "The given field does not exist. Name: fieldDoesNotExist", + }, + testUtils.Request{ + // Ensure that no documents have been written. 
+ Request: ` + query { + Users { + name + } + } + `, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 27 + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + _key + name + age + } + } + `, + Results: []map[string]any{ + { + "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": uint64(27), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_GivenDuplicate_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation where document already exists.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // Collection.Save would treat the second create as an update, and so + // is excluded from this test. 
+ testUtils.CollectionNamedMutationType, + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 27 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 27 + }`, + ExpectedError: "a document with the given dockey already exists.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_GivenEmptyData_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation with empty data param.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(data: "") { + _key + } + }`, + ExpectedError: "given data payload is empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go new file mode 100644 index 0000000000..6c28e898f7 --- /dev/null +++ b/tests/integration/mutation/create/with_version_test.go @@ -0,0 +1,52 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationCreate_ReturnsVersionCID(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with version cid", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(data: "{\"name\": \"John\"}") { + _version { + cid + } + } + }`, + Results: []map[string]any{ + { + "_version": []map[string]any{ + { + "cid": "bafybeifugdzbm7y3eihxe7wbldyesxeh6s6m62ghvwipphtld547rfi4cu", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/one_to_many/delete/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go similarity index 89% rename from tests/integration/mutation/one_to_many/delete/with_show_deleted_test.go rename to tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index 9b97fb3a0f..6969ea3c6f 100644 --- a/tests/integration/mutation/one_to_many/delete/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package delete +package one_to_many import ( "fmt" @@ -18,7 +18,6 @@ import ( "github.com/sourcenetwork/defradb/client" testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_many" ) func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testing.T) { @@ -48,6 +47,20 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin test := testUtils.TestCase{ Description: "One to many delete document using single key show deleted.", Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + rating: Float + author: Author + } + type Author { + name: String + age: Int + published: [Book] + } + `, + }, testUtils.CreateDoc{ CollectionID: 1, Doc: jsonString1, @@ -108,5 +121,5 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin }, } - fixture.ExecuteTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/mutation/relation/utils.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go similarity index 92% rename from tests/integration/mutation/relation/utils.go rename to tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go index 610124122c..89f0e497f4 100644 --- a/tests/integration/mutation/relation/utils.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/utils.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package relation +package one_to_one_to_one import ( "testing" @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func Execute(t *testing.T, test testUtils.TestCase) { +func execute(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ diff --git a/tests/integration/mutation/relation/delete/single_id_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go similarity index 95% rename from tests/integration/mutation/relation/delete/single_id_test.go rename to tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go index da33ca5a6e..851ec73da0 100644 --- a/tests/integration/mutation/relation/delete/single_id_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -8,13 +8,12 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package relation_delete +package one_to_one_to_one import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - relationTests "github.com/sourcenetwork/defradb/tests/integration/mutation/relation" ) func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { @@ -182,6 +181,6 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { } for _, test := range tests { - relationTests.Execute(t, test) + execute(t, test) } } diff --git a/tests/integration/mutation/relation/delete/with_txn_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go similarity index 96% rename from tests/integration/mutation/relation/delete/with_txn_test.go rename to tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go index aa5491417f..166642ae13 100644 --- a/tests/integration/mutation/relation/delete/with_txn_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package relation_delete +package one_to_one_to_one import ( "testing" @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable" testUtils "github.com/sourcenetwork/defradb/tests/integration" - relationTests "github.com/sourcenetwork/defradb/tests/integration/mutation/relation" ) func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { @@ -82,7 +81,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { }, } - relationTests.Execute(t, test) + execute(t, test) } func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { @@ -142,7 +141,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { }, } - relationTests.Execute(t, test) + execute(t, test) } func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *testing.T) { @@ -232,7 +231,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes }, } - relationTests.Execute(t, test) + execute(t, test) } func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *testing.T) { @@ -316,7 +315,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te }, } - relationTests.Execute(t, test) + execute(t, test) } func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) { @@ -377,7 +376,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) }, } - relationTests.Execute(t, test) + execute(t, test) } func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T) { @@ -444,5 +443,5 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T }, } - relationTests.Execute(t, test) + execute(t, test) } diff --git a/tests/integration/mutation/delete/simple_test.go b/tests/integration/mutation/delete/simple_test.go new file mode 100644 index 0000000000..5b28d100d3 --- /dev/null +++ b/tests/integration/mutation/delete/simple_test.go @@ -0,0 +1,65 
@@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithoutSubSelection(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete multiple documents that exist without sub selection, should give error.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User + }`, + ExpectedError: "Field \"delete_User\" of type \"[User]\" must have a sub selection.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithoutSubSelectionFields(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete multiple documents that exist without _key sub-selection.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User{ + + } + }`, + ExpectedError: "Syntax Error GraphQL request (2:17) Unexpected empty IN {}", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_deleted_field_test.go b/tests/integration/mutation/delete/with_deleted_field_test.go new file mode 100644 index 0000000000..25784b52d2 --- /dev/null +++ b/tests/integration/mutation/delete/with_deleted_field_test.go @@ -0,0 +1,56 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test documents a bug, see: +// https://github.com/sourcenetwork/defradb/issues/1846 +func TestMutationDeletion_WithoDeletedField(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(id: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { + _deleted + _key + } + }`, + Results: []map[string]any{ + { + // This should be true, as it has been deleted. + "_deleted": false, + "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_filter_test.go b/tests/integration/mutation/delete/with_filter_test.go new file mode 100644 index 0000000000..70d4550be9 --- /dev/null +++ b/tests/integration/mutation/delete/with_filter_test.go @@ -0,0 +1,207 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete using filter - One matching document, that exists.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(filter: {name: {_eq: "Shahzad"}}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithFilterMatchingMultipleDocs(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete using filter - Multiple matching documents that exist.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "age": 1 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "age": 2 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 3 + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(filter: {name: {_eq: "Shahzad"}}) { + age + } + }`, + Results: []map[string]any{ + { + "age": uint64(2), + }, + { + "age": uint64(1), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithEmptyFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete using filter - Match everything in this collection.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(filter: {}) { + name + } + }`, + Results: 
[]map[string]any{ + { + "name": "Fred", + }, + { + "name": "Shahzad", + }, + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithFilterNoMatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "No delete with filter: because no document matches filter.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(filter: {name: {_eq: "Lone"}}) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithFilterOnEmptyCollection(t *testing.T) { + test := testUtils.TestCase{ + Description: "No delete with filter: because the collection is empty.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User(filter: {name: {_eq: "Lone"}}) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_id_alias_test.go b/tests/integration/mutation/delete/with_id_alias_test.go new file mode 100644 index 0000000000..1890092b00 --- /dev/null +++ b/tests/integration/mutation/delete/with_id_alias_test.go @@ -0,0 +1,51 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDAndAlias(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple delete mutation with an aliased _key name.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + fancyKey: _key + } + }`, + Results: []map[string]any{ + { + "fancyKey": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_id_test.go b/tests/integration/mutation/delete/with_id_test.go new file mode 100644 index 0000000000..68adcc7e6f --- /dev/null +++ b/tests/integration/mutation/delete/with_id_test.go @@ -0,0 +1,72 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion using id that doesn't exist, where the collection is empty.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion using id that doesn't exist, where the collection is non-empty.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Not the id below" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_id_txn_test.go b/tests/integration/mutation/delete/with_id_txn_test.go new file mode 100644 index 0000000000..c4f2ad6bdc --- /dev/null +++ b/tests/integration/mutation/delete/with_id_txn_test.go @@ -0,0 +1,63 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDAndTxn(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple delete mutation where one element exists.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + TransactionID: immutable.Some(0), + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _key + } + }`, + Results: []map[string]any{ + { + "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + }, + }, + }, + testUtils.Request{ + TransactionID: immutable.Some(0), + Request: `query { + User { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_ids_alias_test.go b/tests/integration/mutation/delete/with_ids_alias_test.go new file mode 100644 index 0000000000..1c6be23278 --- /dev/null +++ b/tests/integration/mutation/delete/with_ids_alias_test.go @@ -0,0 +1,68 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete multiple documents that exist, when given multiple keys with alias.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + points: Float + verified: Boolean + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "age": 26, + "points": 48.48, + "verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 26, + "points": 48.48, + "verified": true + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasKey: _key + } + }`, + Results: []map[string]any{ + { + "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + }, + { + "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_ids_filter_test.go b/tests/integration/mutation/delete/with_ids_filter_test.go new file mode 100644 index 0000000000..8d93bdf9cf --- /dev/null +++ b/tests/integration/mutation/delete/with_ids_filter_test.go @@ -0,0 +1,51 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion of using ids and filter, known id and empty filter.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"], filter: {}) { + _key + } + }`, + Results: []map[string]any{ + { + "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_ids_test.go b/tests/integration/mutation/delete/with_ids_test.go new file mode 100644 index 0000000000..48adcb2e48 --- /dev/null +++ b/tests/integration/mutation/delete/with_ids_test.go @@ -0,0 +1,194 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDs(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete multiple documents that exist, when given multiple keys.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _key + } + }`, + Results: []map[string]any{ + { + "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + }, + { + "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithEmptyIDs(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion of using ids, empty ids set.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: []) { + _key + } + }`, + Results: []map[string]any{}, + }, + testUtils.Request{ + // Make sure no documents have been deleted + Request: `query { + User { + name + } + }`, + Results: []map[string]any{ + { + "name": "Shahzad", + }, + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithIDsSingleUnknownID(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion of using ids, single unknown item.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: 
["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithIDsMultipleUnknownID(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion of using ids, single unknown item.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDeletion_WithIDsKnownAndUnknown(t *testing.T) { + test := testUtils.TestCase{ + Description: "Deletion of using ids, known and unknown items.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad" + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _key + } + }`, + Results: []map[string]any{ + { + "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_ids_txn_test.go b/tests/integration/mutation/delete/with_ids_txn_test.go new file mode 100644 index 0000000000..ab3ed174f1 --- /dev/null +++ b/tests/integration/mutation/delete/with_ids_txn_test.go @@ -0,0 +1,69 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple multi-key delete mutation with one key that exists and txn.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + points: Float + verified: Boolean + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "age": 26, + "points": 48.48, + "verified": true + }`, + }, + testUtils.Request{ + TransactionID: immutable.Some(0), + Request: `mutation { + delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _key + } + }`, + Results: []map[string]any{ + { + "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + }, + }, + }, + testUtils.Request{ + TransactionID: immutable.Some(0), + Request: `query { + User(dockeys: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _key + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_ids_update_alias_test.go b/tests/integration/mutation/delete/with_ids_update_alias_test.go new file mode 100644 index 0000000000..a76dccffe2 --- /dev/null +++ b/tests/integration/mutation/delete/with_ids_update_alias_test.go @@ -0,0 +1,76 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { + test := testUtils.TestCase{ + Description: "Delete multiple documents that exist, when given multiple keys with alias after update.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + points: Float + verified: Boolean + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "age": 26, + "points": 48.48, + "verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 26, + "points": 48.48, + "verified": true + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "age": 27, + "points": 48.2, + "verified": false + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasKey: _key + } + }`, + Results: []map[string]any{ + { + "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + }, + { + "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/inline_array/update/simple_test.go b/tests/integration/mutation/inline_array/update/simple_test.go deleted file mode 100644 index f89d74c734..0000000000 --- a/tests/integration/mutation/inline_array/update/simple_test.go +++ /dev/null @@ -1,682 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package update - -import ( - "testing" - - "github.com/sourcenetwork/immutable" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - inlineArray "github.com/sourcenetwork/defradb/tests/integration/mutation/inline_array" -) - -func TestMutationInlineArrayUpdateWithBooleans(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple update mutation with boolean array, replace with nil", - Request: `mutation { - update_Users(data: "{\"likedIndexes\": null}") { - name - likedIndexes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "likedIndexes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "likedIndexes": nil, - }, - }, - }, - { - Description: "Simple update mutation with boolean array, replace with empty", - Request: `mutation { - update_Users(data: "{\"likedIndexes\": []}") { - name - likedIndexes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "likedIndexes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "likedIndexes": []bool{}, - }, - }, - }, - { - Description: "Simple update mutation with boolean array, replace with same size", - Request: `mutation { - update_Users(data: "{\"likedIndexes\": [true, false, true, false]}") { - name - likedIndexes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "likedIndexes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "likedIndexes": []bool{true, false, true, false}, - }, - }, - }, - { - Description: "Simple update mutation with boolean array, replace with smaller size", - Request: `mutation { - update_Users(data: "{\"likedIndexes\": [false, true]}") { - name - likedIndexes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "likedIndexes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "likedIndexes": []bool{false, 
true}, - }, - }, - }, - { - Description: "Simple update mutation with boolean array, replace with larger size", - Request: `mutation { - update_Users(data: "{\"likedIndexes\": [true, false, true, false, true, true]}") { - name - likedIndexes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "likedIndexes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "likedIndexes": []bool{true, false, true, false, true, true}, - }, - }, - }, - } - - for _, test := range tests { - inlineArray.ExecuteTestCase(t, test) - } -} - -func TestMutationInlineArrayWithNillableBooleans(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple inline array with no filter, booleans", - Request: `mutation { - update_Users(data: "{\"indexLikesDislikes\": [true, true, false, true, null]}") { - name - indexLikesDislikes - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "indexLikesDislikes": [true, true, false, true] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "indexLikesDislikes": []immutable.Option[bool]{ - immutable.Some(true), - immutable.Some(true), - immutable.Some(false), - immutable.Some(true), - immutable.None[bool](), - }, - }, - }, - } - - inlineArray.ExecuteTestCase(t, test) -} - -func TestMutationInlineArrayUpdateWithIntegers(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple update mutation with integer array, replace with nil", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": null}") { - name - favouriteIntegers - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteIntegers": nil, - }, - }, - }, - { - Description: "Simple update mutation with integer array, replace with empty", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": []}") { - name - favouriteIntegers 
- } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteIntegers": []int64{}, - }, - }, - }, - { - Description: "Simple update mutation with integer array, replace with same size, positive values", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": [8, 5, 3, 2, 1]}") { - name - favouriteIntegers - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteIntegers": []int64{8, 5, 3, 2, 1}, - }, - }, - }, - { - Description: "Simple update mutation with integer array, replace with same size, positive to mixed values", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": [-1, 2, -3, 5, -8]}") { - name - favouriteIntegers - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteIntegers": []int64{-1, 2, -3, 5, -8}, - }, - }, - }, - { - Description: "Simple update mutation with integer array, replace with smaller size, positive values", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": [1, 2, 3]}") { - name - favouriteIntegers - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteIntegers": []int64{1, 2, 3}, - }, - }, - }, - { - Description: "Simple update mutation with integer array, replace with larger size, positive values", - Request: `mutation { - update_Users(data: "{\"favouriteIntegers\": [1, 2, 3, 5, 8, 13, 21]}") { - name - favouriteIntegers - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteIntegers": [1, 2, 3, 5, 8] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", 
- "favouriteIntegers": []int64{1, 2, 3, 5, 8, 13, 21}, - }, - }, - }, - } - - for _, test := range tests { - inlineArray.ExecuteTestCase(t, test) - } -} - -func TestMutationInlineArrayWithNillableInts(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple inline array with no filter, nillable ints", - Request: `mutation { - update_Users(data: "{\"testScores\": [null, 2, 3, null, 8]}") { - name - testScores - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "testScores": [1, null, 3] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "testScores": []immutable.Option[int64]{ - immutable.None[int64](), - immutable.Some[int64](2), - immutable.Some[int64](3), - immutable.None[int64](), - immutable.Some[int64](8), - }, - }, - }, - } - - inlineArray.ExecuteTestCase(t, test) -} - -func TestMutationInlineArrayUpdateWithFloats(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple update mutation with float array, replace with nil", - Request: `mutation { - update_Users(data: "{\"favouriteFloats\": null}") { - name - favouriteFloats - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteFloats": [3.1425, 0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteFloats": nil, - }, - }, - }, - { - Description: "Simple update mutation with float array, replace with empty", - Request: `mutation { - update_Users(data: "{\"favouriteFloats\": []}") { - name - favouriteFloats - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteFloats": [3.1425, 0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteFloats": []float64{}, - }, - }, - }, - { - Description: "Simple update mutation with float array, replace with same size", - Request: `mutation { - update_Users(data: "{\"favouriteFloats\": [3.1425, -0.00000000001, 1000000]}") { - name - favouriteFloats - } - }`, - Docs: 
map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteFloats": [3.1425, 0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteFloats": []float64{3.1425, -0.00000000001, 1000000}, - }, - }, - }, - { - Description: "Simple update mutation with float array, replace with smaller size", - Request: `mutation { - update_Users(data: "{\"favouriteFloats\": [3.14]}") { - name - favouriteFloats - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteFloats": [3.1425, 0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteFloats": []float64{3.14}, - }, - }, - }, - { - Description: "Simple update mutation with float array, replace with larger size", - Request: `mutation { - update_Users(data: "{\"favouriteFloats\": [3.1425, 0.00000000001, -10, 6.626070]}") { - name - favouriteFloats - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "favouriteFloats": [3.1425, 0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "favouriteFloats": []float64{3.1425, 0.00000000001, -10, 6.626070}, - }, - }, - }, - } - - for _, test := range tests { - inlineArray.ExecuteTestCase(t, test) - } -} - -func TestMutationInlineArrayWithNillableFloats(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple inline array with no filter, nillable floats", - Request: `mutation { - update_Users(data: "{\"pageRatings\": [3.1425, -0.00000000001, null, 10]}") { - name - pageRatings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "pageRatings": [3.1425, null, -0.00000000001, 10] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "pageRatings": []immutable.Option[float64]{ - immutable.Some(3.1425), - immutable.Some(-0.00000000001), - immutable.None[float64](), - immutable.Some[float64](10), - }, - }, - }, - } - - inlineArray.ExecuteTestCase(t, test) -} - -func 
TestMutationInlineArrayUpdateWithStrings(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple update mutation with string array, replace with nil", - Request: `mutation { - update_Users(data: "{\"preferredStrings\": null}") { - name - preferredStrings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "preferredStrings": ["", "the previous", "the first", "empty string"] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "preferredStrings": nil, - }, - }, - }, - { - Description: "Simple update mutation with string array, replace with empty", - Request: `mutation { - update_Users(data: "{\"preferredStrings\": []}") { - name - preferredStrings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "preferredStrings": ["", "the previous", "the first", "empty string"] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "preferredStrings": []string{}, - }, - }, - }, - { - Description: "Simple update mutation with string array, replace with same size", - Request: `mutation { - update_Users(data: "{\"preferredStrings\": [null, \"the previous\", \"the first\", \"null string\"]}") { - name - preferredStrings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "preferredStrings": ["", "the previous", "the first", "empty string"] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "preferredStrings": []string{"", "the previous", "the first", "null string"}, - }, - }, - }, - { - Description: "Simple update mutation with string array, replace with smaller size", - Request: `mutation { - update_Users(data: "{\"preferredStrings\": [\"\", \"the first\"]}") { - name - preferredStrings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "preferredStrings": ["", "the previous", "the first", "empty string"] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "preferredStrings": []string{"", "the first"}, - }, - }, - }, - { - 
Description: "Simple update mutation with string array, replace with larger size", - Request: `mutation { - update_Users(data: "{\"preferredStrings\": [\"\", \"the previous\", \"the first\", \"empty string\", \"blank string\", \"hitchi\"]}") { - name - preferredStrings - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "preferredStrings": ["", "the previous", "the first", "empty string"] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "preferredStrings": []string{ - "", - "the previous", - "the first", - "empty string", - "blank string", - "hitchi", - }, - }, - }, - }, - } - - for _, test := range tests { - inlineArray.ExecuteTestCase(t, test) - } -} - -func TestMutationInlineArrayWithNillableStrings(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple inline array with no filter, nillable strings", - Request: `mutation { - update_Users(data: "{\"pageHeaders\": [\"\", \"the previous\", null, \"empty string\", \"blank string\", \"hitchi\"]}") { - name - pageHeaders - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "pageHeaders": ["", "the previous", "the first", "empty string", null] - }`, - }, - }, - Results: []map[string]any{ - { - "name": "John", - "pageHeaders": []immutable.Option[string]{ - immutable.Some(""), - immutable.Some("the previous"), - immutable.None[string](), - immutable.Some("empty string"), - immutable.Some("blank string"), - immutable.Some("hitchi"), - }, - }, - }, - } - - inlineArray.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/inline_array/utils.go b/tests/integration/mutation/inline_array/utils.go deleted file mode 100644 index c866711614..0000000000 --- a/tests/integration/mutation/inline_array/utils.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package inline_array - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -var userCollectionGQLSchema = (` - type Users { - name: String - likedIndexes: [Boolean!] - indexLikesDislikes: [Boolean] - favouriteIntegers: [Int!] - testScores: [Int] - favouriteFloats: [Float!] - pageRatings: [Float] - preferredStrings: [String!] - pageHeaders: [String] - } -`) - -func ExecuteTestCase(t *testing.T, test testUtils.RequestTestCase) { - testUtils.ExecuteRequestTestCase(t, userCollectionGQLSchema, []string{"Users"}, test) -} diff --git a/tests/integration/mutation/simple/mix/with_txn_test.go b/tests/integration/mutation/mix/with_txn_test.go similarity index 97% rename from tests/integration/mutation/simple/mix/with_txn_test.go rename to tests/integration/mutation/mix/with_txn_test.go index a2d1e9b08e..3b12513a23 100644 --- a/tests/integration/mutation/simple/mix/with_txn_test.go +++ b/tests/integration/mutation/mix/with_txn_test.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable" testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" ) func TestMutationWithTxnDeletesUserGivenSameTransaction(t *testing.T) { @@ -247,6 +246,14 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) test := testUtils.TestCase{ Description: "Update by two different transactions", Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, testUtils.CreateDoc{ CollectionID: 0, Doc: `{ @@ -315,5 +322,5 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) }, } - simpleTests.Execute(t, test) + testUtils.ExecuteTestCase(t, 
test) } diff --git a/tests/integration/mutation/one_to_many/update/related_object_link_test.go b/tests/integration/mutation/one_to_many/update/related_object_link_test.go deleted file mode 100644 index 5c9f235b1a..0000000000 --- a/tests/integration/mutation/one_to_many/update/related_object_link_test.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "fmt" - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_many" -) - -func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation id from single side (wrong)", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ 
// NOTE: There is no `published_id` on book. - Request: fmt.Sprintf( - `mutation { - update_Author(id: "%s", data: "{\"published_id\": \"%s\"}") { - name - } - }`, - author2Key, - bookKey, - ), - ExpectedError: "The given field does not exist. Name: published_id", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. -func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation id from many side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": nil, // Linked to incorrect id - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func 
TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation id from many side, with a wrong field.", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"notName\": \"Unpainted Condo\",\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - ExpectedError: "The given field does not exist. 
Name: notName", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation id from many side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - { - "name": "New Shahzad", - "published": []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "New Shahzad", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} diff --git 
a/tests/integration/mutation/one_to_many/update/related_object_link_with_alias_test.go b/tests/integration/mutation/one_to_many/update/related_object_link_with_alias_test.go deleted file mode 100644 index f579796e96..0000000000 --- a/tests/integration/mutation/one_to_many/update/related_object_link_with_alias_test.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "fmt" - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_many" -) - -func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation alias name from single side (wrong)", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, 
- }, - testUtils.Request{ // NOTE: There is no `published_id` and so `published` alias is invalid use on book. - Request: fmt.Sprintf( - `mutation { - update_Author(id: "%s", data: "{\"published\": \"%s\"}") { - name - } - }`, - author2Key, - bookKey, - ), - ExpectedError: "The given field or alias to field does not exist. Name: published", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. -func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation alias name from many side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": nil, // Linked to 
incorrect id - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation alias name from many side, with a wrong field.", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"notName\": \"Unpainted Condo\",\"author\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - ExpectedError: "The given field does not exist. 
Name: notName", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to many update mutation using relation alias name from many side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - { - "name": "New Shahzad", - "published": []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "New Shahzad", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} diff --git 
a/tests/integration/mutation/one_to_one/update/related_object_link_test.go b/tests/integration/mutation/one_to_one/update/related_object_link_test.go deleted file mode 100644 index 5928b591fb..0000000000 --- a/tests/integration/mutation/one_to_one/update/related_object_link_test.go +++ /dev/null @@ -1,437 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "fmt" - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_one" -) - -// Note: This test should probably not pass, as even after updating a link to a new document -// from one side the previous link still remains on the other side of the link. 
-func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from single side (wrong)", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Author(id: "%s", data: "{\"published_id\": \"%s\"}") { - name - } - }`, - author2Key, - bookKey, - ), - Results: []map[string]any{ - { - "name": "New Shahzad", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - { - "name": "New Shahzad", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -// Note: This test should probably not pass, as even after updating a link to a new document -// from one side the 
previous link still remains on the other side of the link. -func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from secondary side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - { - "name": "New Shahzad", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func 
TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorKey := "bae-" + invalidLenSubKey - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using invalid relation id", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from secondary side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ 
- { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - ExpectedError: "no document for the given key exists", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from secondary side, with a wrong field.", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"notName\": \"Unpainted Condo\",\"author_id\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - ExpectedError: "The given field does not exist. 
Name: notName", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/one_to_one/update/related_object_link_with_alias_test.go b/tests/integration/mutation/one_to_one/update/related_object_link_with_alias_test.go deleted file mode 100644 index 271edf26d1..0000000000 --- a/tests/integration/mutation/one_to_one/update/related_object_link_with_alias_test.go +++ /dev/null @@ -1,427 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "fmt" - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - fixture "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_one" -) - -// Note: This test should probably not pass, as even after updating a link to a new document -// from one side the previous link still remains on the other side of the link. 
-func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using alias relation id from single side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Author(id: "%s", data: "{\"published\": \"%s\"}") { - name - } - }`, - author2Key, - bookKey, - ), - Results: []map[string]any{ - { - "name": "New Shahzad", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - { - "name": "New Shahzad", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -// Note: This test should probably not pass, as even after updating a link to a new document -// from one side the 
previous link still remains on the other side of the link. -func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using alias relation id from secondary side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - { - "name": "New Shahzad", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func 
TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorKey := "bae-" + invalidLenSubKey - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using invalid alias relation id", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using alias relation id from secondary side", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: 
[]map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"author\": \"%s\"}") { - name - } - }`, - bookKey, - invalidAuthorKey, - ), - ExpectedError: "no document for the given key exists", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" - - test := testUtils.TestCase{ - Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", - Actions: []any{ - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"John Grisham\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author1Key, - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_Author(data: "{\"name\": \"New Shahzad\"}") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": author2Key, - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - create_Book(data: "{\"name\": \"Painted House\",\"author\": \"%s\"}") { - _key - name - } - }`, - author1Key, - ), - Results: []map[string]any{ - { - "_key": bookKey, - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: fmt.Sprintf( - `mutation { - update_Book(id: "%s", data: "{\"notName\": \"Unpainted Condo\",\"author\": \"%s\"}") { - name - } - }`, - bookKey, - author2Key, - ), - ExpectedError: "The given field does not exist. 
Name: notName", - }, - }, - } - - fixture.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/one_to_one/update/with_simple_test.go b/tests/integration/mutation/one_to_one/update/with_simple_test.go deleted file mode 100644 index e01a6253fa..0000000000 --- a/tests/integration/mutation/one_to_one/update/with_simple_test.go +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/one_to_one" -) - -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. 
-func TestMutationUpdateOneToOneNoChild(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to one create mutation, from the wrong side", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John" - }`, - }, - testUtils.Request{ - Request: `mutation { - update_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"bae-fd541c25-229e-5280-b44b-e5c2af3e374d\"}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - }, - }, - }, - }, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOne(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to one update mutation", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John Grisham" - }`, - }, - testUtils.Request{ - Request: ` - mutation { - update_Author(data: "{\"name\": \"John Grisham\",\"published_id\": \"bae-3d236f89-6a31-5add-a36a-27971a2eac76\"}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - }, - } - - simpleTests.ExecuteTestCase(t, test) -} - -func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to one create mutation, from the secondary side", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John Grisham" - }`, 
- }, - testUtils.Request{ - Request: ` - mutation { - update_Book(data: "{\"name\": \"Painted House\",\"author_id\": \"bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed\"}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Author { - name - published { - name - } - } - }`, - Results: []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - }, - } - simpleTests.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/create/simple_test.go b/tests/integration/mutation/simple/create/simple_test.go deleted file mode 100644 index b173611f7f..0000000000 --- a/tests/integration/mutation/simple/create/simple_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" -) - -func TestMutationCreateSimpleErrorsGivenNonExistantField(t *testing.T) { - test := testUtils.TestCase{ - Description: "Simple create mutation with non existant field", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.Request{ - Request: `mutation { - create_Users(data: "{\"name\": \"John\",\"fieldDoesNotExist\": 27}") { - _key - } - }`, - ExpectedError: "The given field does not exist. Name: fieldDoesNotExist", - }, - testUtils.Request{ - // Ensure that no documents have been written. - Request: ` - query { - Users { - name - } - } - `, - Results: []map[string]any{}, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestMutationCreateSimple(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple create mutation", - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - _key - name - age - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - }, - }, - } - - simpleTests.ExecuteTestCase(t, test) -} - -func TestMutationCreateSimpleDoesNotCreateDocGivenDuplicate(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple create mutation where document already exists.", - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27}") { - _key - name - age - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27 - }`, - }, - }, - ExpectedError: "a document with the given dockey already exists. 
DocKey: ", - } - - simpleTests.ExecuteTestCase(t, test) -} - -func TestMutationCreateSimpleDoesNotCreateDocEmptyData(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple create mutation with empty data param.", - Request: `mutation { - create_User(data: "") { - _key - name - age - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27 - }`, - }, - }, - ExpectedError: "given data payload is empty", - } - - simpleTests.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/create/with_version_test.go b/tests/integration/mutation/simple/create/with_version_test.go deleted file mode 100644 index 8a9abcaaf2..0000000000 --- a/tests/integration/mutation/simple/create/with_version_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package create - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" -) - -func TestMutationCreateSimpleReturnVersionCID(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple create mutation", - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - _version { - cid - } - } - }`, - Results: []map[string]any{ - { - "_version": []map[string]any{ - { - "cid": "bafybeif5xonyzwmg5y5ocebvjkb4vs3i3qmrnuwwtf4yshvabqcqcxnwky", - }, - }, - }, - }, - } - - simpleTests.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/delete/multi_ids_test.go b/tests/integration/mutation/simple/delete/multi_ids_test.go deleted file mode 100644 index 80585d5d79..0000000000 --- a/tests/integration/mutation/simple/delete/multi_ids_test.go +++ /dev/null @@ -1,478 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package delete - -import ( - "testing" - - "github.com/sourcenetwork/immutable" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" -) - -func TestDeletionOfMultipleDocumentUsingMultipleKeysWhereOneExists(t *testing.T) { - test := testUtils.TestCase{ - Description: "Simple multi-key delete mutation with one key that exists.", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - points: Float - verified: Boolean - } - `, - }, - testUtils.CreateDoc{ - Doc: `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - testUtils.Request{ - TransactionID: immutable.Some(0), - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key - } - }`, - Results: []map[string]any{ - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - }, - testUtils.Request{ - TransactionID: immutable.Some(0), - Request: `query { - User(dockeys: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key - } - }`, - Results: []map[string]any{}, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestDeletionOfMultipleDocumentUsingMultipleKeys_Success(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Delete multiple documents that exist, when given multiple keys.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - ExpectedError: "", - }, - - { - Description: "Delete multiple documents 
that exist, when given multiple keys with alias.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{ - { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - ExpectedError: "", - }, - - { - Description: "Delete multiple documents that exist, where an update happens too.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Updates: map[int]map[int][]string{ - 0: { - 0: { - `{ - "age": 27, - "points": 48.2, - "verified": false - }`, - }, - }, - }, - Results: []map[string]any{ - { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - ExpectedError: "", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeleteWithEmptyIdsSet(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion of using ids, empty ids set.", - Request: `mutation { - delete_User(ids: []) { - _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeleteWithSingleUnknownIds(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: 
"Deletion of using ids, single unknown item.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { - _key - } - }`, - Results: []map[string]any{}, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeleteWithMultipleUnknownIds(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion of using ids, multiple unknown items.", - Request: `mutation { - delete_User(ids: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { - _key - } - }`, - Results: []map[string]any{}, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeleteWithUnknownAndKnownIds(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion of using ids, known and unknown items.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { - _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeleteWithKnownIdsAndEmptyFilter(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion of using ids and filter, known id and empty filter.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"], filter: {}) { - _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeletionOfMultipleDocumentUsingMultipleKeys_Failure(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Delete multiple documents that exist without sub selection, should give error.", - Request: 
`mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - ExpectedError: "Field \"delete_User\" of type \"[User]\" must have a sub selection.", - }, - - { - Description: "Delete multiple documents that exist without _key sub-selection.", - Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - ExpectedError: "Syntax Error GraphQL request (2:114) Unexpected empty IN {}\n\n1: mutation {\n2: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009delete_User(ids: [\"bae-6a6482a8-24e1-5c73-a237-ca569e41507d\", \"bae-3a1a496e-24eb-5ae3-9c17-524c146a393e\"]) {\n ^\n3: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009}\n", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeletionOfMultipleDocumentsUsingSingleKeyWithShowDeletedDocumentQuery_Success(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 43 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Andy", - "age": 74 - }`, - }, - testUtils.Request{ - Request: `mutation { - delete_User(ids: ["bae-05de0e64-f300-55b3-8973-5fa79045a083", "bae-07e5c44c-ee88-5c92-85ad-fb3148c48bef"]){ - _key - } - }`, - Results: []map[string]any{ - { - "_key": 
"bae-05de0e64-f300-55b3-8973-5fa79045a083", - }, - { - "_key": "bae-07e5c44c-ee88-5c92-85ad-fb3148c48bef", - }, - }, - }, - testUtils.Request{ - Request: `query { - User(showDeleted: true) { - _deleted - name - age - } - }`, - Results: []map[string]any{ - { - "_deleted": true, - "name": "Andy", - "age": uint64(74), - }, - { - "_deleted": true, - "name": "John", - "age": uint64(43), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestDeletionOfMultipleDocumentsUsingEmptySet(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 43 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Andy", - "age": 74 - }`, - }, - testUtils.Request{ - Request: `mutation { - delete_User(ids: []){ - _key - } - }`, - Results: []map[string]any{}, - }, - testUtils.Request{ - // Make sure no documents have been deleted - Request: `query { - User { - name - age - } - }`, - Results: []map[string]any{ - { - "name": "Andy", - "age": uint64(74), - }, - { - "name": "John", - "age": uint64(43), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/delete/single_id_test.go b/tests/integration/mutation/simple/delete/single_id_test.go deleted file mode 100644 index a3ec777233..0000000000 --- a/tests/integration/mutation/simple/delete/single_id_test.go +++ /dev/null @@ -1,282 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package delete - -import ( - "testing" - - "github.com/sourcenetwork/immutable" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" -) - -func TestDeletionOfADocumentUsingSingleKeyWhereDocExists(t *testing.T) { - test := testUtils.TestCase{ - Description: "Simple delete mutation where one element exists.", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - points: Float - verified: Boolean - } - `, - }, - testUtils.CreateDoc{ - Doc: `{ - "name": "Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - testUtils.Request{ - TransactionID: immutable.Some(0), - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") { - _key - } - }`, - Results: []map[string]any{ - { - "_key": "bae-8ca944fd-260e-5a44-b88f-326d9faca810", - }, - }, - }, - testUtils.Request{ - TransactionID: immutable.Some(0), - Request: `query { - User(dockey: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") { - _key - } - }`, - - // explicitly empty - Results: []map[string]any{}, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple delete mutation with an aliased _key name.", - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - }, - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") { - fancyKey: _key - } - }`, - - Results: []map[string]any{ - { - "fancyKey": "bae-8ca944fd-260e-5a44-b88f-326d9faca810", - }, - }, - ExpectedError: "", - }, - { - Description: "Delete an updated document and return an aliased _key name.", - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") { - myTestKey: _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": 
"Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - }, - Updates: map[int]map[int][]string{ - 0: { - 0: { - `{ - "age": 27, - "points": 48.2, - "verified": false - }`, - }, - }, - }, - Results: []map[string]any{ - { - "myTestKey": "bae-8ca944fd-260e-5a44-b88f-326d9faca810", - }, - }, - ExpectedError: "", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeleteWithUnknownIdEmptyCollection(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion using id that doesn't exist, where the collection is empty.", - Request: `mutation { - delete_User(id: "bae-028383cc-d6ba-5df7-959f-2bdce3536a05") { - _key - } - }`, - Docs: map[int][]string{}, - Results: []map[string]any{}, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeleteWithUnknownId(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Deletion using id that doesn't exist, where the collection is non-empty.", - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca811") { - _key - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - } - simpleTests.ExecuteTestCase(t, test) -} - -func TestDeletionOfADocumentUsingSingleKey_Failure(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Deletion of a document without sub selection, should give error.", - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - ExpectedError: "Field \"delete_User\" of type \"[User]\" must have a sub selection.", - }, - - { - Description: "Deletion of a document without _key sub-selection.", - Request: `mutation { - delete_User(id: "bae-8ca944fd-260e-5a44-b88f-326d9faca810") { - } - 
}`, - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.5, - "verified": true - }`, - }, - }, - Results: []map[string]any{}, - ExpectedError: "Syntax Error GraphQL request (2:67) Unexpected empty IN {}\n\n1: mutation {\n2: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009delete_User(id: \"bae-8ca944fd-260e-5a44-b88f-326d9faca810\") {\n ^\n3: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009}\n", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery_Success(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - age: Int - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 43 - }`, - }, - testUtils.Request{ - Request: `mutation { - delete_User(id: "bae-07e5c44c-ee88-5c92-85ad-fb3148c48bef") { - _deleted - _key - } - }`, - Results: []map[string]any{ - { - // Note: This should show a `Deleted` status but the order of the planNodes - // makes it so the status is requested prior to deleting. If the planNode ordering - // can be altered, this can change in the future. 
- "_deleted": false, - "_key": "bae-07e5c44c-ee88-5c92-85ad-fb3148c48bef", - }, - }, - }, - testUtils.Request{ - Request: `query { - User(showDeleted: true) { - _deleted - name - age - } - }`, - Results: []map[string]any{ - { - "_deleted": true, - "name": "John", - "age": uint64(43), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/delete/with_filter_test.go b/tests/integration/mutation/simple/delete/with_filter_test.go deleted file mode 100644 index 6c934bfd5f..0000000000 --- a/tests/integration/mutation/simple/delete/with_filter_test.go +++ /dev/null @@ -1,418 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package delete - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" -) - -func TestDeletionOfDocumentsWithFilter_Success(t *testing.T) { - tests := []testUtils.RequestTestCase{ - - { - Description: "Delete using filter - One matching document, that exists.", - - Request: `mutation { - delete_User(filter: {name: {_eq: "Shahzad"}}) { - _key - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{ - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - }, - - ExpectedError: "", - }, - - { - Description: "Delete using filter - Multiple matching documents that exist.", - Request: `mutation { - delete_User(filter: {name: {_eq: "Shahzad"}}) { - _key - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 25, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 6, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 1, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{ - { - "_key": "bae-4b5b1765-560c-5843-9abc-24d21d8aa598", - }, - { - "_key": "bae-5a8530c0-c521-5e83-8243-4ce267bc76fa", - }, - { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - { - "_key": "bae-ca88bc10-1415-59b1-a72c-d19ed44d4e15", - }, - }, - - ExpectedError: "", - }, - - { - Description: "Delete using filter - Multiple matching documents that exist with alias.", - - Request: `mutation { - delete_User(filter: { - _and: [ - {age: {_lt: 26}}, - {verified: {_eq: true}}, - ] - }) { - DeletedKeyByFilter: _key - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 
26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 25, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 6, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 1, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{ - { - "DeletedKeyByFilter": "bae-4b5b1765-560c-5843-9abc-24d21d8aa598", - }, - { - "DeletedKeyByFilter": "bae-5a8530c0-c521-5e83-8243-4ce267bc76fa", - }, - { - "DeletedKeyByFilter": "bae-ca88bc10-1415-59b1-a72c-d19ed44d4e15", - }, - }, - - ExpectedError: "", - }, - - { - Description: "Delete using filter - Match everything in this collection.", - - Request: `mutation { - delete_User(filter: {}) { - DeletedKeyByFilter: _key - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 25, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 6, - "points": 48.48, - "verified": true - }`, - `{ - "name": "Shahzad", - "age": 1, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{ - { - "DeletedKeyByFilter": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", - }, - { - "DeletedKeyByFilter": "bae-4b5b1765-560c-5843-9abc-24d21d8aa598", - }, - { - "DeletedKeyByFilter": "bae-5a8530c0-c521-5e83-8243-4ce267bc76fa", - }, - { - "DeletedKeyByFilter": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", - }, - { - "DeletedKeyByFilter": "bae-ca88bc10-1415-59b1-a72c-d19ed44d4e15", - }, - }, - - ExpectedError: "", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeletionOfDocumentsWithFilter_Failure(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "No delete with filter: because no 
document matches filter.", - - Request: `mutation { - delete_User(filter: {name: {_eq: "Lone"}}) { - _key - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{}, - - ExpectedError: "", - }, - - { - Description: "No delete with filter: because the collection is empty.", - - Request: `mutation { - delete_User(filter: {name: {_eq: "Shahzad"}}) { - _key - } - }`, - - Docs: map[int][]string{}, - - Results: []map[string]any{}, - - ExpectedError: "", - }, - - { - Description: "No delete with filter: because has no sub-selection.", - - Request: `mutation { - delete_User(filter: {name: {_eq: "Shahzad"}}) - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{}, - - ExpectedError: "Field \"delete_User\" of type \"[User]\" must have a sub selection.", - }, - - { - Description: "No delete with filter: because has no _key in sub-selection.", - - Request: `mutation { - delete_User(filter: {name: {_eq: "Shahzad"}}) { - } - }`, - - Docs: map[int][]string{ - 0: { - `{ - "name": "Shahzad", - "age": 26, - "points": 48.48, - "verified": true - }`, - `{ - "name": "John", - "age": 26, - "points": 48.48, - "verified": true - }`, - }, - }, - - Results: []map[string]any{}, - - ExpectedError: "Syntax Error GraphQL request (2:53) Unexpected empty IN {}\n\n1: mutation {\n2: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009delete_User(filter: {name: {_eq: \"Shahzad\"}}) {\n ^\n3: \\u0009\\u0009\\u0009\\u0009\\u0009\\u0009}\n", - }, - } - - for _, test := range tests { - simpleTests.ExecuteTestCase(t, test) - } -} - -func TestDeletionOfDocumentsWithFilterWithShowDeletedDocumentQuery_Success(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User 
{ - name: String - age: Int - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 43 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Andy", - "age": 74 - }`, - }, - testUtils.Request{ - Request: `mutation { - delete_User(filter: {name: {_eq: "John"}}) { - _key - } - }`, - Results: []map[string]any{ - { - "_key": "bae-07e5c44c-ee88-5c92-85ad-fb3148c48bef", - }, - }, - }, - testUtils.Request{ - Request: `query { - User(showDeleted: true) { - _deleted - name - age - } - }`, - Results: []map[string]any{ - { - "_deleted": false, - "name": "Andy", - "age": uint64(74), - }, - { - "_deleted": true, - "name": "John", - "age": uint64(43), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/update/utils.go b/tests/integration/mutation/simple/update/utils.go deleted file mode 100644 index 920282bdea..0000000000 --- a/tests/integration/mutation/simple/update/utils.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package update - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -var userSchema = (` - type User { - name: String - age: Int - points: Float - verified: Boolean - created_at: DateTime - } -`) - -func ExecuteTestCase(t *testing.T, test testUtils.RequestTestCase) { - testUtils.ExecuteRequestTestCase(t, userSchema, []string{"User"}, test) -} diff --git a/tests/integration/mutation/simple/update/with_datetime_test.go b/tests/integration/mutation/simple/update/with_datetime_test.go deleted file mode 100644 index e7a93605d6..0000000000 --- a/tests/integration/mutation/simple/update/with_datetime_test.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package update - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -func TestSimpleDateTimeMutationUpdateWithBooleanFilter(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple DateTime update mutation with boolean equals filter", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"created_at\": \"2021-07-23T03:46:56.647Z\"}") { - _key - name - created_at - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1, - "created_at": "2011-07-23T03:46:56.647Z" - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-e0374cf9-4e46-5494-bb8a-6dea31912d6b", - "name": "John", - "created_at": "2021-07-23T03:46:56.647Z", - }, - }, - }, - { - Description: "Simple DateTime update mutation with boolean equals filter, multiple rows but single match", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"created_at\": \"2021-07-23T03:46:56.647Z\"}") { - _key - name - created_at - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1, - "created_at": "2011-07-23T03:46:56.647Z" - }`, - `{ - "name": "Bob", - "age": 39, - "verified": false, - "points": 66.6, - "created_at": "2041-07-23T03:46:56.647Z" - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-e0374cf9-4e46-5494-bb8a-6dea31912d6b", - "name": "John", - "created_at": "2021-07-23T03:46:56.647Z", - }, - }, - }, - { - Description: "Simple DateTime update mutation with boolean equals filter, multiple rows", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"created_at\": \"2021-07-23T03:46:56.647Z\"}") { - _key - name - created_at - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1, - "created_at": "2011-07-23T03:46:56.647Z" - }`, - `{ - "name": "Bob", - "age": 39, - "verified": 
true, - "points": 66.6, - "created_at": "2001-07-23T03:46:56.647Z" - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-b2f6bd19-56bb-5717-8367-a638e3ca52e0", - "name": "Bob", - "created_at": "2021-07-23T03:46:56.647Z", - }, - { - "_key": "bae-e0374cf9-4e46-5494-bb8a-6dea31912d6b", - "name": "John", - "created_at": "2021-07-23T03:46:56.647Z", - }, - }, - }, - } - - for _, test := range tests { - ExecuteTestCase(t, test) - } -} - -func TestSimpleDateTimeMutationUpdateWithIdInFilter(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple DateTime update mutation with id in filter, multiple rows", - Request: `mutation { - update_User(ids: ["bae-e0374cf9-4e46-5494-bb8a-6dea31912d6b", "bae-b2f6bd19-56bb-5717-8367-a638e3ca52e0"], data: "{\"created_at\": \"2021-07-23T03:46:56.647Z\"}") { - _key - name - created_at - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1, - "created_at": "2011-07-23T03:46:56.647Z" - }`, - `{ - "name": "Bob", - "age": 39, - "verified": true, - "points": 66.6, - "created_at": "2001-07-23T03:46:56.647Z" - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-b2f6bd19-56bb-5717-8367-a638e3ca52e0", - "name": "Bob", - "created_at": "2021-07-23T03:46:56.647Z", - }, - { - "_key": "bae-e0374cf9-4e46-5494-bb8a-6dea31912d6b", - "name": "John", - "created_at": "2021-07-23T03:46:56.647Z", - }, - }, - } - - ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/update/with_filter_test.go b/tests/integration/mutation/simple/update/with_filter_test.go deleted file mode 100644 index 86ef991567..0000000000 --- a/tests/integration/mutation/simple/update/with_filter_test.go +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package update - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -func TestSimpleMutationUpdateWithBooleanFilterWhereResultFilteredOut(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple update mutation with boolean equals filter", - // The update will result in a record that no longer matches the filter - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"verified\":false}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - }, - }, - // As the record no longer matches the filter it is not returned - Results: []map[string]any{}, - } - - ExecuteTestCase(t, test) -} - -func TestSimpleMutationUpdateWithBooleanFilter(t *testing.T) { - tests := []testUtils.RequestTestCase{ - { - Description: "Simple update mutation with boolean equals filter", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "name": "John", - "points": float64(59), - }, - }, - }, - { - Description: "Simple update mutation with boolean equals filter, multiple rows but single match", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - `{ - "name": "Bob", - "age": 39, - "verified": false, - "points": 66.6 - }`, 
- }, - }, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "name": "John", - "points": float64(59), - }, - }, - }, - { - Description: "Simple update mutation with boolean equals filter, multiple rows", - Request: `mutation { - update_User(filter: {verified: {_eq: true}}, data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - `{ - "name": "Bob", - "age": 39, - "verified": true, - "points": 66.6 - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "name": "John", - "points": float64(59), - }, - { - "_key": "bae-455b5896-6203-582f-b46e-729c53a2d14b", - "name": "Bob", - "points": float64(59), - }, - }, - }, - } - - for _, test := range tests { - ExecuteTestCase(t, test) - } -} - -func TestSimpleMutationUpdateWithIdInFilter(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple update mutation with id in filter, multiple rows", - Request: `mutation { - update_User(ids: ["bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", "bae-958c9334-73cf-5695-bf06-cf06826babfa"], data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - `{ - "name": "Bob", - "age": 39, - "verified": false, - "points": 66.6 - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "name": "John", - "points": float64(59), - }, - { - "_key": "bae-958c9334-73cf-5695-bf06-cf06826babfa", - "name": "Bob", - "points": float64(59), - }, - }, - } - - ExecuteTestCase(t, test) -} - -func TestSimpleMutationUpdateWithIdEqualsFilter(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple update mutation with id equals filter, multiple rows but single match", - Request: `mutation { - update_User(id: 
"bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - `{ - "name": "Bob", - "age": 39, - "verified": false, - "points": 66.6 - }`, - }, - }, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "name": "John", - "points": float64(59), - }, - }, - } - - ExecuteTestCase(t, test) -} - -func TestSimpleMutationUpdateWithNonExistantId(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "Simple update mutation with non existant id", - Request: `mutation { - update_User(id: "bae-does-not-exist", data: "{\"points\": 59}") { - _key - name - points - } - }`, - Docs: map[int][]string{ - 0: { - `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - }, - }, - Results: []map[string]any{}, - } - - ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/simple/special/invalid_operation_test.go b/tests/integration/mutation/special/invalid_operation_test.go similarity index 60% rename from tests/integration/mutation/simple/special/invalid_operation_test.go rename to tests/integration/mutation/special/invalid_operation_test.go index 7a6f660481..4a8ae2f4c9 100644 --- a/tests/integration/mutation/simple/special/invalid_operation_test.go +++ b/tests/integration/mutation/special/invalid_operation_test.go @@ -14,19 +14,29 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" - simpleTests "github.com/sourcenetwork/defradb/tests/integration/mutation/simple" ) func TestMutationInvalidMutation(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "Simple invalid mutation", - Request: `mutation { - dostuff_User(data: "") { - _key - } - }`, - ExpectedError: "Cannot query field \"dostuff_User\" on type \"Mutation\".", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + 
type User { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + dostuff_User(data: "") { + _key + } + }`, + ExpectedError: "Cannot query field \"dostuff_User\" on type \"Mutation\".", + }, + }, } - simpleTests.ExecuteTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/mutation/update/field_kinds/array_bool_test.go b/tests/integration/mutation/update/field_kinds/array_bool_test.go new file mode 100644 index 0000000000..b1a5500242 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_bool_test.go @@ -0,0 +1,232 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfBooleansToNil(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with nil", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": null + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfBooleansToEmpty(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with empty", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": [] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": []bool{}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfBooleansToSameSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with same size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": [true, false, true, false] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": []bool{true, false, true, false}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfBooleansToSmallerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with smaller size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": [false, true] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": []bool{false, true}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfBooleansToLargerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with larger size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": [true, false, true, false, true, true] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": []bool{true, false, true, false, true, true}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_float_test.go b/tests/integration/mutation/update/field_kinds/array_float_test.go new file mode 100644 index 0000000000..a1806eadf0 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_float_test.go @@ -0,0 +1,232 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfFloatsToNil(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with float array, replace with nil", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": null + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfFloatsToEmpty(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with float array, replace with empty", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": [] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": []float64{}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfFloatsToSameSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with float array, replace with same size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": [3.1425, -0.00000000001, 1000000] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": []float64{3.1425, -0.00000000001, 1000000}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfFloatsToSmallerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with float array, replace with smaller size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": [3.14] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": []float64{3.14}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfFloatsToLargerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with float array, replace with larger size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": [3.1425, 0.00000000001, -10, 6.626070] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": []float64{3.1425, 0.00000000001, -10, 6.626070}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_int_test.go b/tests/integration/mutation/update/field_kinds/array_int_test.go new file mode 100644 index 0000000000..63ecfc0969 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_int_test.go @@ -0,0 +1,275 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfIntsToNil(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with nil", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": null + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfIntsToEmpty(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with empty", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []int64{}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfIntsToSameSizePositiveValues(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with same size, positive values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [8, 5, 3, 2, 1] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []int64{8, 5, 3, 2, 1}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfIntsToSameSizeMixedValues(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with same size, positive to mixed values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [-1, 2, -3, 5, -8] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []int64{-1, 2, -3, 5, -8}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfIntsToSmallerSizePositiveValues(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with smaller size, positive values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [1, 2, 3] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []int64{1, 2, 3}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfIntsToLargerSizePositiveValues(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with integer array, replace with larger size, positive values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, 2, 3, 5, 8] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [1, 2, 3, 5, 8, 13, 21] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []int64{1, 2, 3, 5, 8, 13, 21}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_nillable_bool_test.go b/tests/integration/mutation/update/field_kinds/array_nillable_bool_test.go new file mode 100644 index 0000000000..d069927d5e --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_nillable_bool_test.go @@ -0,0 +1,68 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfNillableBooleans(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean array, replace with nil", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + likedIndexes: [Boolean] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "likedIndexes": [true, true, false, true] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "likedIndexes": [true, true, false, true, null] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + likedIndexes + } + } + `, + Results: []map[string]any{ + { + "likedIndexes": []immutable.Option[bool]{ + immutable.Some(true), + immutable.Some(true), + immutable.Some(false), + immutable.Some(true), + immutable.None[bool](), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_nillable_float_test.go b/tests/integration/mutation/update/field_kinds/array_nillable_float_test.go new file mode 100644 index 0000000000..825461cd4c --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_nillable_float_test.go @@ -0,0 +1,67 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfNillableFloats(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, nillable floats", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteFloats: [Float] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, null, -0.00000000001, 10] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteFloats": [3.1425, -0.00000000001, null, 10] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteFloats + } + } + `, + Results: []map[string]any{ + { + "favouriteFloats": []immutable.Option[float64]{ + immutable.Some(3.1425), + immutable.Some(-0.00000000001), + immutable.None[float64](), + immutable.Some[float64](10), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_nillable_int_test.go b/tests/integration/mutation/update/field_kinds/array_nillable_int_test.go new file mode 100644 index 0000000000..99e28d2520 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_nillable_int_test.go @@ -0,0 +1,68 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfNillableInts(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, nillable ints", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + favouriteIntegers: [Int] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [1, null, 3] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "favouriteIntegers": [null, 2, 3, null, 8] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + favouriteIntegers + } + } + `, + Results: []map[string]any{ + { + "favouriteIntegers": []immutable.Option[int64]{ + immutable.None[int64](), + immutable.Some[int64](2), + immutable.Some[int64](3), + immutable.None[int64](), + immutable.Some[int64](8), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_nillable_string_test.go b/tests/integration/mutation/update/field_kinds/array_nillable_string_test.go new file mode 100644 index 0000000000..eea09e0157 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_nillable_string_test.go @@ -0,0 +1,69 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfStringsInts(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, nillable string", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string", null] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": ["", "the previous", null, "empty string", "blank string", "hitchi"] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": []immutable.Option[string]{ + immutable.Some(""), + immutable.Some("the previous"), + immutable.None[string](), + immutable.Some("empty string"), + immutable.Some("blank string"), + immutable.Some("hitchi"), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/array_string_test.go b/tests/integration/mutation/update/field_kinds/array_string_test.go new file mode 100644 index 0000000000..3481fb7912 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/array_string_test.go @@ -0,0 +1,239 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithArrayOfStringsToNil(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with string array, replace with nil", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string"] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": null + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": nil, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfStringsToEmpty(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with string array, replace with empty", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string"] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": [] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": []string{}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfStringsToSameSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with string array, replace with same size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string"] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": ["zeroth", "the previous", "the first", "null string"] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": []string{"zeroth", "the previous", "the first", "null string"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfStringsToSmallerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with string array, replace with smaller size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String!] + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string"] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": ["", "the first"] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": []string{"", "the first"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithArrayOfStringsToLargerSize(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with string array, replace with larger size", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + preferredStrings: [String!] 
+ } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "preferredStrings": ["", "the previous", "the first", "empty string"] + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "preferredStrings": ["", "the previous", "the first", "empty string", "blank string", "hitchi"] + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + preferredStrings + } + } + `, + Results: []map[string]any{ + { + "preferredStrings": []string{ + "", + "the previous", + "the first", + "empty string", + "blank string", + "hitchi", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/date_time_test.go b/tests/integration/mutation/update/field_kinds/date_time_test.go new file mode 100644 index 0000000000..3a79a2c1e0 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/date_time_test.go @@ -0,0 +1,108 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithDateTimeField(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update of date time field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + created_at: DateTime + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "created_at": "2011-07-23T01:11:11.111Z" + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "created_at": "2021-07-23T02:22:22.222Z" + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + created_at + } + } + `, + Results: []map[string]any{ + { + "created_at": "2021-07-23T02:22:22.222Z", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithDateTimeField_MultipleDocs(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update of date time field, multiple docs", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + created_at: DateTime + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "created_at": "2011-07-23T01:11:11.111Z" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "created_at": "2021-07-23T02:22:22.222Z" + }`, + }, + testUtils.Request{ + Request: `mutation { + update_Users(data: "{\"created_at\": \"2031-07-23T03:23:23.333Z\"}") { + name + created_at + } + }`, + Results: []map[string]any{ + { + "name": "John", + "created_at": "2031-07-23T03:23:23.333Z", + }, + { + "name": "Fred", + "created_at": "2031-07-23T03:23:23.333Z", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go new file mode 100644 index 0000000000..cb87d336f0 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go @@ -0,0 +1,272 
@@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many + +import ( + "fmt" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation id from single side (wrong)", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + // NOTE: There is no `published_id` on book. + Doc: fmt.Sprintf( + `{ + "published_id": "%s" + }`, + bookKey, + ), + ExpectedError: "The given field does not exist. Name: published_id", + }, + }, + } + + executeTestCase(t, test) +} + +// Note: This test should probably not pass, as it contains a +// reference to a document that doesnt exist. 
+func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation id from many side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + invalidAuthorKey, + ), + }, + testUtils.Request{ + Request: `query { + Author { + name + published { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published": []map[string]any{}, + }, + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": nil, // Linked to incorrect id + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation id from many side, with a wrong field.", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "notName": "Unpainted Condo", + "author_id": "%s" + }`, + author2Key, + ), + 
ExpectedError: "The given field does not exist. Name: notName", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation id from many side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + author2Key, + ), + }, + testUtils.Request{ + Request: `query { + Author { + name + published { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published": []map[string]any{}, + }, + { + "name": "New Shahzad", + "published": []map[string]any{ + { + "name": "Painted House", + }, + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "New Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/utils.go b/tests/integration/mutation/update/field_kinds/one_to_many/utils.go new file mode 100644 index 0000000000..c4ef949e53 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/one_to_many/utils.go @@ -0,0 +1,47 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func executeTestCase(t *testing.T, test testUtils.TestCase) { + testUtils.ExecuteTestCase( + t, + testUtils.TestCase{ + Description: test.Description, + SupportedMutationTypes: test.SupportedMutationTypes, + Actions: append( + []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + rating: Float + author: Author + } + + type Author { + name: String + age: Int + published: [Book] + } + `, + }, + }, + test.Actions..., + ), + }, + ) +} diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go new file mode 100644 index 0000000000..46d4eb6f32 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -0,0 +1,402 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_many + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collection(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation alias name from single side (wrong)", + // This restiction is temporary due to an inconsitent error message, see + // TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL + // and https://github.com/sourcenetwork/defradb/issues/1854 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + // NOTE: There is no `published` on book. + Doc: fmt.Sprintf( + `{ + "published": "%s" + }`, + bookKey, + ), + ExpectedError: "The given field does not exist. 
Name: published", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation alias name from single side (wrong)", + // This restiction is temporary due to an inconsitent error message, see + // TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collection + // and https://github.com/sourcenetwork/defradb/issues/1854 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + // NOTE: There is no `published` on book. + Doc: fmt.Sprintf( + `{ + "published": "%s" + }`, + bookKey, + ), + ExpectedError: "The given field or alias to field does not exist. Name: published", + }, + }, + } + + executeTestCase(t, test) +} + +// Note: This test should probably not pass, as it contains a +// reference to a document that doesnt exist. 
+func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation alias name from many side", + // This restiction is temporary due to a bug in the collection api, see + // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection + // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author": "%s" + }`, + invalidAuthorKey, + ), + }, + testUtils.Request{ + Request: `query { + Author { + name + published { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published": []map[string]any{}, + }, + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": nil, // Linked to incorrect id + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +// Note: This test should probably not pass, as it contains a +// reference to a document that doesnt exist. +// +// This test also documents a bug in the collection api, see: +// TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL +// and https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection(t *testing.T) {
	author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed"
	// Deliberately malformed key used to link to a non-existent author.
	invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5"

	test := testUtils.TestCase{
		Description: "One to many update mutation using relation alias name from many side",
		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
			testUtils.CollectionNamedMutationType,
			testUtils.CollectionSaveMutationType,
		}),
		Actions: []any{
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "John Grisham"
				}`,
			},
			testUtils.CreateDoc{
				CollectionID: 0,
				Doc: fmt.Sprintf(
					`{
						"name": "Painted House",
						"author": "%s"
					}`,
					author1Key,
				),
			},
			// Unlike the GQL path, the collection api rejects the alias field outright
			// (the bug referenced by the note on the GQL variant of this test).
			testUtils.UpdateDoc{
				CollectionID: 0,
				DocID:        0,
				Doc: fmt.Sprintf(
					`{
						"author": "%s"
					}`,
					invalidAuthorKey,
				),
				ExpectedError: "The given field does not exist. Name: author",
			},
		},
	}

	executeTestCase(t, test)
}

func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongField_Error(t *testing.T) {
	author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed"
	author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5"

	test := testUtils.TestCase{
		Description: "One to many update mutation using relation alias name from many side, with a wrong field.",
		// This restriction is temporary due to a bug in the collection api, see
		// TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection
		// and https://github.com/sourcenetwork/defradb/issues/1703 for more info.
		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
			testUtils.GQLRequestMutationType,
		}),
		Actions: []any{
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "John Grisham"
				}`,
			},
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "New Shahzad"
				}`,
			},
			testUtils.CreateDoc{
				CollectionID: 0,
				Doc: fmt.Sprintf(
					`{
						"name": "Painted House",
						"author": "%s"
					}`,
					author1Key,
				),
			},
			// "notName" is not a schema field, so the update must fail even though
			// the "author" alias relation part of the patch is valid.
			testUtils.UpdateDoc{
				CollectionID: 0,
				DocID:        0,
				Doc: fmt.Sprintf(
					`{
						"notName": "Unpainted Condo",
						"author": "%s"
					}`,
					author2Key,
				),
				ExpectedError: "The given field does not exist. Name: notName",
			},
		},
	}

	executeTestCase(t, test)
}

func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing.T) {
	author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed"
	author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5"

	test := testUtils.TestCase{
		Description: "One to many update mutation using relation alias name from many side",
		// This restriction is temporary due to a bug in the collection api, see
		// TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection
		// and https://github.com/sourcenetwork/defradb/issues/1703 for more info.
		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
			testUtils.GQLRequestMutationType,
		}),
		Actions: []any{
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "John Grisham"
				}`,
			},
			testUtils.CreateDoc{
				CollectionID: 1,
				Doc: `{
					"name": "New Shahzad"
				}`,
			},
			testUtils.CreateDoc{
				CollectionID: 0,
				Doc: fmt.Sprintf(
					`{
						"name": "Painted House",
						"author": "%s"
					}`,
					author1Key,
				),
			},
			// Happy path: re-link the book from author1 to author2 via the alias field.
			testUtils.UpdateDoc{
				CollectionID: 0,
				DocID:        0,
				Doc: fmt.Sprintf(
					`{
						"author": "%s"
					}`,
					author2Key,
				),
			},
			// The book now appears under author2 only…
			testUtils.Request{
				Request: `query {
					Author {
						name
						published {
							name
						}
					}
				}`,
				Results: []map[string]any{
					{
						"name":      "John Grisham",
						"published": []map[string]any{},
					},
					{
						"name": "New Shahzad",
						"published": []map[string]any{
							{
								"name": "Painted House",
							},
						},
					},
				},
			},
			// …and resolves author2 from the many side.
			testUtils.Request{
				Request: `query {
					Book {
						name
						author {
							name
						}
					}
				}`,
				Results: []map[string]any{
					{
						"name": "Painted House",
						"author": map[string]any{
							"name": "New Shahzad",
						},
					},
				},
			},
		},
	}

	executeTestCase(t, test)
}
diff --git a/tests/integration/mutation/simple/utils.go b/tests/integration/mutation/update/field_kinds/one_to_one/utils.go
similarity index 60%
rename from tests/integration/mutation/simple/utils.go
rename to tests/integration/mutation/update/field_kinds/one_to_one/utils.go
index c45c7340be..8cd920a063 100644
--- a/tests/integration/mutation/simple/utils.go
+++ b/tests/integration/mutation/update/field_kinds/one_to_one/utils.go
@@ -8,7 +8,7 @@
 // by the Apache License, Version 2.0, included in the file
 // licenses/APL.txt.
-package simple +package one_to_one import ( "testing" @@ -16,28 +16,29 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -var userSchema = (` - type User { - name: String - age: Int - points: Float - verified: Boolean - } -`) - -func ExecuteTestCase(t *testing.T, test testUtils.RequestTestCase) { - testUtils.ExecuteRequestTestCase(t, userSchema, []string{"User"}, test) -} - -func Execute(t *testing.T, test testUtils.TestCase) { +func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ - Description: test.Description, + Description: test.Description, + SupportedMutationTypes: test.SupportedMutationTypes, Actions: append( []any{ testUtils.SchemaUpdate{ - Schema: userSchema, + Schema: ` + type Book { + name: String + rating: Float + author: Author + } + + type Author { + name: String + age: Int + verified: Boolean + published: Book @primary + } + `, }, }, test.Actions..., diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go new file mode 100644 index 0000000000..57633fd126 --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -0,0 +1,265 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_one + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to one update mutation using alias relation id from single side", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1703 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + Doc: fmt.Sprintf( + `{ + "published": "%s" + }`, + bookKey, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to one update mutation using alias relation id from secondary side", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
+ SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author": "%s" + }`, + author2Key, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorKey := "bae-" + invalidLenSubKey + + test := testUtils.TestCase{ + Description: "One to one update mutation using invalid alias relation id", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
+ SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author": "%s" + }`, + invalidAuthorKey, + ), + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + + test := testUtils.TestCase{ + Description: "One to one update mutation using alias relation id from secondary side", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
+ SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author": "%s" + }`, + invalidAuthorKey, + ), + ExpectedError: "no document for the given key exists", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1703 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "notName": "Unpainted Condo", + "author": "%s" + }`, + author2Key, + ), + ExpectedError: "The given field does not exist. 
Name: notName", + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go new file mode 100644 index 0000000000..2a4c93644d --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -0,0 +1,429 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_one + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// Note: This test should probably not pass, as it contains a +// reference to a document that doesnt exist. 
+func TestMutationUpdateOneToOneNoChild(t *testing.T) { + unknownKey := "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + + test := testUtils.TestCase{ + Description: "One to one create mutation, from the wrong side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "published_id": "%s" + }`, + unknownKey, + ), + }, + testUtils.Request{ + Request: `query { + Author { + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, + }, + }, + }, + } + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne(t *testing.T) { + bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + + test := testUtils.TestCase{ + Description: "One to one update mutation", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "published_id": "%s" + }`, + bookKey, + ), + }, + testUtils.Request{ + Request: ` + query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + }, + }, + testUtils.Request{ + Request: ` + query { + Author { + name + published { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published": map[string]any{ + "name": "Painted House", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { + authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + + test := testUtils.TestCase{ + Description: "One to one create mutation, from the secondary side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + 
CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + authorKey, + ), + }, + testUtils.Request{ + Request: ` + query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + }, + }, + testUtils.Request{ + Request: ` + query { + Author { + name + published { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published": map[string]any{ + "name": "Painted House", + }, + }, + }, + }, + }, + } + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to one update mutation using relation id from single side (wrong)", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + Doc: fmt.Sprintf( + `{ + "published_id": "%s" + }`, + bookKey, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to one update mutation using relation id from secondary side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John 
Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + author2Key, + ), + ExpectedError: "target document is already linked to another document.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorKey := "bae-" + invalidLenSubKey + + test := testUtils.TestCase{ + Description: "One to one update mutation using invalid relation id", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + invalidAuthorKey, + ), + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + + test := testUtils.TestCase{ + Description: "One to one update mutation using relation id from secondary side", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + 
DocID: 0, + Doc: fmt.Sprintf( + `{ + "author_id": "%s" + }`, + invalidAuthorKey, + ), + ExpectedError: "no document for the given key exists", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { + author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + + test := testUtils.TestCase{ + Description: "One to one update mutation using relation id from secondary side, with a wrong field.", + // This restiction is temporary due to a bug in the collection api, see + // https://github.com/sourcenetwork/defradb/issues/1852 for more info. + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author_id": "%s" + }`, + author1Key, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 0, + DocID: 0, + Doc: fmt.Sprintf( + `{ + "notName": "Unpainted Condo", + "author_id": "%s" + }`, + author2Key, + ), + ExpectedError: "The given field does not exist. 
Name: notName", + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/mutation/simple/update/special/underscored_schema_test.go b/tests/integration/mutation/update/underscored_schema_test.go similarity index 54% rename from tests/integration/mutation/simple/update/special/underscored_schema_test.go rename to tests/integration/mutation/update/underscored_schema_test.go index ac137ea40d..7639458ae5 100644 --- a/tests/integration/mutation/simple/update/special/underscored_schema_test.go +++ b/tests/integration/mutation/update/underscored_schema_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package special +package update import ( "testing" @@ -16,39 +16,43 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -var myUserSchema = (` - type My_User { - name: String - } -`) - -func executeTestCase(t *testing.T, test testUtils.RequestTestCase) { - testUtils.ExecuteRequestTestCase(t, myUserSchema, []string{"My_User"}, test) -} - func TestMutationUpdateUnderscoredSchema(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "Simple update of schema with underscored name", - Request: `mutation { - update_My_User(data: "{\"name\": \"Fred\"}") { - _key - name - } - }`, - Docs: map[int][]string{ - 0: { - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type My_User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ "name": "John" }`, }, - }, - Results: []map[string]any{ - { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", - "name": "Fred", + testUtils.UpdateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.Request{ + Request: ` + query { + My_User { + name + } + } + `, + Results: []map[string]any{ + { + "name": "Fred", + }, + }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/mutation/update/with_delete_test.go 
b/tests/integration/mutation/update/with_delete_test.go new file mode 100644 index 0000000000..444d16f87c --- /dev/null +++ b/tests/integration/mutation/update/with_delete_test.go @@ -0,0 +1,55 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestUpdateSave_DeletedDoc_DoesNothing(t *testing.T) { + test := testUtils.TestCase{ + Description: "Save existing, deleted document", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // We only wish to test collection.Save in this test. + testUtils.CollectionSaveMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.DeleteDoc{ + DocID: 0, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "name": "Fred" + }`, + ExpectedError: "a document with the given dockey has been deleted", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/with_filter_test.go b/tests/integration/mutation/update/with_filter_test.go new file mode 100644 index 0000000000..1b47ee6840 --- /dev/null +++ b/tests/integration/mutation/update/with_filter_test.go @@ -0,0 +1,111 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithBooleanFilter_ResultFilteredOut(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean equals filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + verified: Boolean + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "verified": true + }`, + }, + testUtils.Request{ + // The update will result in a record that no longer matches the filter + Request: `mutation { + update_Users(filter: {verified: {_eq: true}}, data: "{\"verified\":false}") { + _key + name + verified + } + }`, + // As the record no longer matches the filter it is not returned + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithBooleanFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with boolean filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + verified: Boolean + points: Float + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "verified": true, + "points": 42.1 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "verified": false, + "points": 66.6 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "verified": true, + "points": 33 + }`, + }, + testUtils.Request{ + Request: `mutation { + update_Users(filter: {verified: {_eq: true}}, data: "{\"points\": 59}") { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "Fred", + "points": float64(59), + }, + { + "name": "John", + "points": float64(59), + }, + }, + }, + }, + } + + 
testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/with_id_test.go b/tests/integration/mutation/update/with_id_test.go new file mode 100644 index 0000000000..59b47bc234 --- /dev/null +++ b/tests/integration/mutation/update/with_id_test.go @@ -0,0 +1,97 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithId(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with id", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float + } + `, + }, + testUtils.CreateDoc{ + // bae-cc36febf-4029-52b3-a876-c99c6293f588 + Doc: `{ + "name": "John", + "points": 42.1 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "points": 66.6 + }`, + }, + testUtils.Request{ + Request: `mutation { + update_Users(id: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": float64(59), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithNonExistantId(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with non existant id", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float + } + `, + }, + testUtils.CreateDoc{ + // bae-cc36febf-4029-52b3-a876-c99c6293f588 + Doc: `{ + "name": "John", + "points": 42.1 + }`, + }, + testUtils.Request{ + Request: `mutation { + update_Users(id: 
"bae-does-not-exist", data: "{\"points\": 59}") { + _key + name + points + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go new file mode 100644 index 0000000000..6e8ff33dab --- /dev/null +++ b/tests/integration/mutation/update/with_ids_test.go @@ -0,0 +1,76 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithIds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with ids", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float + } + `, + }, + testUtils.CreateDoc{ + // bae-cc36febf-4029-52b3-a876-c99c6293f588 + Doc: `{ + "name": "John", + "points": 42.1 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "points": 66.6 + }`, + }, + testUtils.CreateDoc{ + // bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901 + Doc: `{ + "name": "Fred", + "points": 33 + }`, + }, + testUtils.Request{ + Request: `mutation { + update_Users( + ids: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], + data: "{\"points\": 59}" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "Fred", + "points": float64(59), + }, + { + "name": "John", + "points": float64(59), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index 
24d20d8c31..311a088c86 100644 --- a/tests/integration/p2p.go +++ b/tests/integration/p2p.go @@ -121,11 +121,6 @@ type GetAllP2PCollections struct { // node 1 to see if it has been replicated. type WaitForSync struct{} -// AnyOf may be used as `Results` field where the value may -// be one of several values, yet the value of that field must be the same -// across all nodes due to strong eventual consistency. -type AnyOf []any - // connectPeers connects two existing, started, nodes as peers. It returns a channel // that will receive an empty struct upon sync completion of all expected peer-sync events. // diff --git a/tests/integration/query/one_to_many/simple_test.go b/tests/integration/query/one_to_many/simple_test.go index d7e886907e..9e4ad72fd5 100644 --- a/tests/integration/query/one_to_many/simple_test.go +++ b/tests/integration/query/one_to_many/simple_test.go @@ -167,7 +167,7 @@ func TestQueryOneToManyWithNonExistantParent(t *testing.T) { { "name": "Painted House", "rating": 4.9, - "Author": nil, + "author": nil, }, }, } diff --git a/tests/integration/query/one_to_many/utils.go b/tests/integration/query/one_to_many/utils.go index e8ae79efa2..d1e25df661 100644 --- a/tests/integration/query/one_to_many/utils.go +++ b/tests/integration/query/one_to_many/utils.go @@ -16,8 +16,6 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -type dataMap = map[string]any - var bookAuthorGQLSchema = (` type Book { name: String diff --git a/tests/integration/query/one_to_many/with_average_filter_test.go b/tests/integration/query/one_to_many/with_average_filter_test.go deleted file mode 100644 index 1404be5962..0000000000 --- a/tests/integration/query/one_to_many/with_average_filter_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package one_to_many - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -// This test asserts that only a single join is used - the _avg reuses the rendered join as they -// have matching filters (average adds a ne nil filter). -func TestQueryOneToManyWithAverageAndChildNeNilFilterSharesJoinField(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from many side with average", - Request: `query @explain { - Author { - name - _avg(published: {field: rating}) - published(filter: {rating: {_ne: null}}){ - name - } - } - }`, - Results: []dataMap{ - { - "explain": dataMap{ - "selectTopNode": dataMap{ - "averageNode": dataMap{ - "countNode": dataMap{ - "sources": []dataMap{ - { - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "fieldName": "published", - }, - }, - "sumNode": dataMap{ - "sources": []dataMap{ - { - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "fieldName": "published", - "childFieldName": "rating", - }, - }, - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "2", - "collectionName": "Author", - "spans": []dataMap{ - { - "start": "/2", - "end": "/3", - }, - }, - }, - }, - "subTypeName": "published", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "scanNode": dataMap{ - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "collectionID": "1", - "collectionName": "Book", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, - }, - }, - }, - }, - }, - }, - }, - }, - 
}, - }, - }, - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/query/one_to_many/with_cid_dockey_test.go b/tests/integration/query/one_to_many/with_cid_dockey_test.go index b792db5492..aa8dd1906e 100644 --- a/tests/integration/query/one_to_many/with_cid_dockey_test.go +++ b/tests/integration/query/one_to_many/with_cid_dockey_test.go @@ -60,7 +60,7 @@ import ( // }, // } -// testUtils.AssertPanicAndSkipChangeDetection(t, func() { executeTestCase(t, test) }) +// testUtils.AssertPanic(t, func() { executeTestCase(t, test) }) // } func TestQueryOneToManyWithCidAndDocKey(t *testing.T) { diff --git a/tests/integration/query/one_to_many/with_count_filter_test.go b/tests/integration/query/one_to_many/with_count_filter_test.go index 9deecae01f..b367e6c856 100644 --- a/tests/integration/query/one_to_many/with_count_filter_test.go +++ b/tests/integration/query/one_to_many/with_count_filter_test.go @@ -153,210 +153,3 @@ func TestQueryOneToManyWithCountWithFilterAndChildFilter(t *testing.T) { executeTestCase(t, test) } - -// This test asserts that only a single join is used - the _count reuses the rendered join as they -// have matching filters. 
-func TestQueryOneToManyWithCountWithFilterAndChildFilterSharesJoinField(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from many side with count with filter", - Request: `query @explain { - Author { - name - _count(published: {filter: {rating: {_ne: null}}}) - published(filter: {rating: {_ne: null}}){ - name - } - } - }`, - Results: []dataMap{ - { - "explain": dataMap{ - "selectTopNode": dataMap{ - "countNode": dataMap{ - "sources": []dataMap{ - { - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "fieldName": "published", - }, - }, - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "rootName": "author", - "root": dataMap{ - "scanNode": dataMap{ - "filter": nil, - "collectionID": "2", - "collectionName": "Author", - "spans": []dataMap{ - { - "start": "/2", - "end": "/3", - }, - }, - }, - }, - "subTypeName": "published", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "scanNode": dataMap{ - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "collectionID": "1", - "collectionName": "Book", - "spans": []dataMap{ - { - "start": "/1", - "end": "/2", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - } - - executeTestCase(t, test) -} - -// This test asserts that two joins are used - the _count cannot reuse the rendered join as they -// dont have matching filters. 
-func TestQueryOneToManyWithCountAndChildFilterDoesNotShareJoinField(t *testing.T) { - test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from many side with count", - Request: `query @explain { - Author { - name - _count(published: {}) - published(filter: {rating: {_ne: null}}){ - name - } - } - }`, - Results: []dataMap{ - { - "explain": dataMap{ - "selectTopNode": dataMap{ - "countNode": dataMap{ - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "parallelNode": []dataMap{ - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "root": dataMap{ - "scanNode": dataMap{ - "collectionID": "2", - "collectionName": "Author", - "filter": nil, - "spans": []dataMap{ - { - "end": "/3", - "start": "/2", - }, - }, - }, - }, - "rootName": "author", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "Book", - "filter": dataMap{ - "rating": dataMap{ - "_ne": nil, - }, - }, - "spans": []dataMap{ - { - "end": "/2", - "start": "/1", - }, - }, - }, - }, - }, - }, - "subTypeName": "published", - }, - }, - { - "typeIndexJoin": dataMap{ - "joinType": "typeJoinMany", - "root": dataMap{ - "scanNode": dataMap{ - "collectionID": "2", - "collectionName": "Author", - "filter": nil, - "spans": []dataMap{ - { - "end": "/3", - "start": "/2", - }, - }, - }, - }, - "rootName": "author", - "subType": dataMap{ - "selectTopNode": dataMap{ - "selectNode": dataMap{ - "_keys": nil, - "filter": nil, - "scanNode": dataMap{ - "collectionID": "1", - "collectionName": "Book", - "filter": nil, - "spans": []dataMap{ - { - "end": "/2", - "start": "/1", - }, - }, - }, - }, - }, - }, - "subTypeName": "published", - }, - }, - }, - }, - "sources": []dataMap{ - { - "fieldName": "published", - "filter": nil, - }, - }, - }, - }, - }, - }, - }, - } - - executeTestCase(t, test) -} diff --git a/tests/integration/query/one_to_many/with_dockeys_test.go 
b/tests/integration/query/one_to_many/with_dockeys_test.go index fb02e37336..1c58e5947f 100644 --- a/tests/integration/query/one_to_many/with_dockeys_test.go +++ b/tests/integration/query/one_to_many/with_dockeys_test.go @@ -50,7 +50,7 @@ func TestQueryOneToManyWithChildDocKeys(t *testing.T) { "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, `{ - "name": "A Time for Mercy", + "name": "The Firm", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, diff --git a/tests/integration/query/one_to_many/with_filter_test.go b/tests/integration/query/one_to_many/with_filter_test.go index 72f62d6abd..322f1581bc 100644 --- a/tests/integration/query/one_to_many/with_filter_test.go +++ b/tests/integration/query/one_to_many/with_filter_test.go @@ -17,274 +17,442 @@ import ( ) func TestQueryOneToManyWithNumericGreaterThanFilterOnParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter", - Request: `query { - Author(filter: {age: {_gt: 63}}) { - name - age - published { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, 
"verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "published": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - }, + testUtils.Request{ + Request: `query { + Author(filter: {age: {_gt: 63}}) { + name + age + published { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "A Time for Mercy", - "rating": 4.5, + "name": "John Grisham", + "age": uint64(65), + "published": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + { + "name": "A Time for Mercy", + "rating": 4.5, + }, + }, }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithNumericGreaterThanChildFilterOnParentWithUnrenderedChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter", - Request: `query { - Author(filter: {published: {rating: {_gt: 4.8}}}) { - name - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ 
"name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", + testUtils.Request{ + Request: `query { + Author(filter: {published: {rating: {_gt: 4.8}}, age: {_gt: 63}}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, + }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithNumericGreaterThanFilterOnParentAndChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter on root and sub type", - Request: `query { - Author(filter: {age: {_gt: 63}}) { - name - age - published(filter: {rating: {_gt: 4.6}}) { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false }`, }, - }, - 
Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "published": []map[string]any{ + testUtils.Request{ + Request: `query { + Author(filter: {age: {_gt: 63}}) { + name + age + published(filter: {rating: {_gt: 4.6}}) { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "Painted House", - "rating": 4.9, + "name": "John Grisham", + "age": uint64(65), + "published": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + }, }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToManyWithMultipleAliasedFilteredChildren(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-many relation query from the many side, simple filter on root and sub type", - Request: `query { - Author { - name - age - p1: published(filter: {rating: {_gt: 4.6}}) { - name - rating - } - p2: published(filter: {rating: {_lt: 4.6}}) { - name - rating - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "A Time for Mercy", "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" }`, - `{ + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ "name": "Theif Lord", "rating": 4.8, "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" }`, }, - //authors - 1: { + testUtils.CreateDoc{ + CollectionID: 1, // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + Doc: `{ "name": "John Grisham", "age": 65, "verified": true }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia 
Funke", "age": 62, "verified": false }`, }, - }, - Results: []map[string]any{ - { - "name": "John Grisham", - "age": uint64(65), - "p1": []map[string]any{ + testUtils.Request{ + Request: `query { + Author { + name + age + p1: published(filter: {rating: {_gt: 4.6}}) { + name + rating + } + p2: published(filter: {rating: {_lt: 4.6}}) { + name + rating + } + } + }`, + Results: []map[string]any{ { - "name": "Painted House", - "rating": 4.9, + "name": "John Grisham", + "age": uint64(65), + "p1": []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + }, + }, + "p2": []map[string]any{ + { + "name": "A Time for Mercy", + "rating": 4.5, + }, + }, }, - }, - "p2": []map[string]any{ { - "name": "A Time for Mercy", - "rating": 4.5, + "name": "Cornelia Funke", + "age": uint64(62), + "p1": []map[string]any{ + { + "name": "Theif Lord", + "rating": 4.8, + }, + }, + "p2": []map[string]any{}, }, }, }, - { - "name": "Cornelia Funke", - "age": uint64(62), - "p1": []map[string]any{ + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToManyWithCompoundOperatorInFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-many relation query filter with compound operator and relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Painted House", + "rating": 4.9, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "A Time for Mercy", + "rating": 4.5, + "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Theif Lord", + "rating": 4.8, + "author_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "The Lord of the Rings", + "rating": 5.0, + "author_id": "bae-61d279c1-eab9-56ec-8654-dce0324ebfda" + }`, + }, + 
testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 + Doc: `{ + "name": "Cornelia Funke", + "age": 62, + "verified": false + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-61d279c1-eab9-56ec-8654-dce0324ebfda + Doc: `{ + "name": "John Tolkien", + "age": 70, + "verified": true + }`, + }, + testUtils.Request{ + Request: `query { + Author(filter: {_or: [ + {_and: [ + {published: {rating: {_lt: 5.0}}}, + {published: {rating: {_gt: 4.8}}} + ]}, + {_and: [ + {age: {_le: 65}}, + {published: {name: {_like: "%Lord%"}}} + ]}, + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, { - "name": "Theif Lord", - "rating": 4.8, + "name": "Cornelia Funke", }, }, - "p2": []map[string]any{}, + }, + testUtils.Request{ + Request: `query { + Author(filter: {_and: [ + { _not: {published: {rating: {_gt: 4.8}}}}, + { _not: {published: {rating: {_lt: 4.8}}}} + ]}) { + name + } + }`, + Results: []map[string]any{{ + "name": "Cornelia Funke", + }}, }, }, } - - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/query/one_to_many_to_one/with_filter_test.go b/tests/integration/query/one_to_many_to_one/with_filter_test.go index 7f15fe58a0..99890196bb 100644 --- a/tests/integration/query/one_to_many_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_many_to_one/with_filter_test.go @@ -284,3 +284,122 @@ func TestOneToManyToOneWithTwoLevelDeepFilter(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestOneToManyToOneWithCompoundOperatorInFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "1-N-1 two level deep filter with compound operator and relation", + Actions: []any{ + gqlSchemaOneToManyToOne(), + createDocsWith6BooksAnd5Publishers(), + testUtils.CreateDoc{ + 
CollectionID: 0, + // bae-61d279c1-eab9-56ec-8654-dce0324ebfda + Doc: `{ + "name": "John Tolkien", + "age": 70, + "verified": true + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-0718e995-e7b5-55b1-874a-8f7d956be53c + Doc: `{ + "name": "The Lord of the Rings", + "rating": 5.0, + "author_id": "bae-61d279c1-eab9-56ec-8654-dce0324ebfda" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Allen & Unwin", + "address": "1 Allen Ave., Sydney, Australia", + "yearOpened": 1954, + "book_id": "bae-0718e995-e7b5-55b1-874a-8f7d956be53c" + }`, + }, + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {age: {_gt: 50}}, + {_or: [ + {book: {publisher: {yearOpened: {_gt: 2020}}}}, + {book: {publisher: {yearOpened: {_lt: 1960}}}} + ]} + ]}){ + name + } + }`, + Results: []map[string]any{ + { + "name": "John Tolkien", + }, + { + "name": "Cornelia Funke", + }, + }, + }, + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {_not: {age: {_ge: 70}}}, + {book: {rating: {_gt: 2.5}}}, + {_or: [ + {book: {publisher: {yearOpened: {_le: 2020}}}}, + {_not: {book: {rating: {_le: 4.0}}}} + ]} + ]}){ + name + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestOneToManyToOneWithCompoundOperatorInSubFilterAndRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "1-N-1 with sub filter with compound operator and relation", + Actions: []any{ + gqlSchemaOneToManyToOne(), + createDocsWith6BooksAnd5Publishers(), + testUtils.Request{ + Request: `query { + Author (filter: {_and: [ + {age: {_gt: 20}}, + {_or: [ + {book: {publisher: {yearOpened: {_lt: 2020}}}}, + {book: {rating: { _lt: 1}}} + ]} + ]}){ + name + book (filter: {_and: [ + {publisher: {yearOpened: {_lt: 2020}}}, + {_or: [ + {rating: { _lt: 3.4}}, + {publisher: {name: {_eq: "Not existing publisher"}}} + ]} + ]}){ + name + } + } + }`, + Results: []map[string]any{{ + "name": 
"John Grisham", + "book": []map[string]any{{ + "name": "Sooley", + }}, + }}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/one_to_one/simple_test.go b/tests/integration/query/one_to_one/simple_test.go index 5369893de6..1fcefa0606 100644 --- a/tests/integration/query/one_to_one/simple_test.go +++ b/tests/integration/query/one_to_one/simple_test.go @@ -282,3 +282,105 @@ func TestQueryOneToOneWithNilParent(t *testing.T) { executeTestCase(t, test) } + +func TestQueryOneToOne_WithRelationIDFromPrimarySide(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation primary direction, relation ID field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + CollectionID: 0, + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.Request{ + Request: `query { + Author { + name + published_id + } + }`, + Results: []map[string]any{ + { + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOne_WithRelationIDFromSecondarySide(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation secondary direction, relation ID field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + CollectionID: 0, + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + 
"published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.Request{ + Request: `query { + Book { + name + author_id + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/one_to_one/with_id_field_test.go b/tests/integration/query/one_to_one/with_clashing_id_field_test.go similarity index 86% rename from tests/integration/query/one_to_one/with_id_field_test.go rename to tests/integration/query/one_to_one/with_clashing_id_field_test.go index ecb0929981..f563f1e358 100644 --- a/tests/integration/query/one_to_one/with_id_field_test.go +++ b/tests/integration/query/one_to_one/with_clashing_id_field_test.go @@ -16,7 +16,8 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryOneToOneWithIdFieldOnSecondary(t *testing.T) { +// This documents unwanted behaviour, see https://github.com/sourcenetwork/defradb/issues/1520 +func TestQueryOneToOneWithClashingIdFieldOnSecondary(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-one relation secondary direction, id field with name clash on secondary side", Actions: []any{ @@ -62,7 +63,7 @@ func TestQueryOneToOneWithIdFieldOnSecondary(t *testing.T) { Results: []map[string]any{ { "name": "Painted House", - "author_id": uint64(123456), + "author_id": "bae-9d67a886-64e3-520b-8cd5-1ca7b098fabe", "author": map[string]any{ "name": "John Grisham", }, @@ -76,7 +77,7 @@ func TestQueryOneToOneWithIdFieldOnSecondary(t *testing.T) { } // This documents unwanted behaviour, see https://github.com/sourcenetwork/defradb/issues/1520 -func TestQueryOneToOneWithIdFieldOnPrimary(t *testing.T) { +func TestQueryOneToOneWithClashingIdFieldOnPrimary(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-one relation primary direction, id field with name clash on primary side", Actions: []any{ @@ 
-108,7 +109,7 @@ func TestQueryOneToOneWithIdFieldOnPrimary(t *testing.T) { "name": "John Grisham", "published_id": "bae-d82dbe47-9df1-5e33-bd87-f92e9c378161" }`, - ExpectedError: "value doesn't contain number; it contains string", + ExpectedError: "target document is already linked to another document.", }, }, } diff --git a/tests/integration/query/one_to_one/with_count_filter_test.go b/tests/integration/query/one_to_one/with_count_filter_test.go new file mode 100644 index 0000000000..c005acac01 --- /dev/null +++ b/tests/integration/query/one_to_one/with_count_filter_test.go @@ -0,0 +1,119 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_one + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test documents a bug and should be altered with: +// https://github.com/sourcenetwork/defradb/issues/1869 +func TestQueryOneToOneWithCountWithCompoundOrFilterThatIncludesRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with count with _or filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-1c890922-ddf9-5820-a888-c7f977848934 + Doc: `{ + "name": "Some Other Book", + "rating": 3.5 + }`, + }, + testUtils.CreateDoc{ + 
CollectionID: 0, + // bae-e8642720-08cb-5f5b-a8d6-7187c444a78d + Doc: `{ + "name": "Yet Another Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Other Writer", + "age": 35, + "verified": false, + "published_id": "bae-1c890922-ddf9-5820-a888-c7f977848934" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Yet Another Writer", + "age": 30, + "verified": false, + "published_id": "bae-e8642720-08cb-5f5b-a8d6-7187c444a78d" + }`, + }, + testUtils.Request{ + Request: `query { + _count(Book: {filter: {_or: [ + {_not: {author: {age: {_lt: 65}}} }, + {_not: {author: {age: {_gt: 30}}} } + ]}}) + }`, + Results: []map[string]any{ + { + "_count": "2", + }, + }, + }, + }, + } + + testUtils.AssertPanic( + t, + func() { + testUtils.ExecuteTestCase(t, test) + }, + ) +} diff --git a/tests/integration/query/one_to_one/with_filter_test.go b/tests/integration/query/one_to_one/with_filter_test.go index 88bc48a03e..a4b6abf6de 100644 --- a/tests/integration/query/one_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_one/with_filter_test.go @@ -17,235 +17,478 @@ import ( ) func TestQueryOneToOneWithNumericFilterOnParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple filter on sub type", - Request: `query { - Book { - name - rating - author(filter: {age: {_eq: 65}}) { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + 
testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book { + name + rating + author(filter: {age: {_eq: 65}}) { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithStringFilterOnChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple filter on parent", - Request: `query { - Book(filter: {name: {_eq: "Painted House"}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted 
House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {name: {_eq: "Painted House"}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithBooleanFilterOnChild(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with simple sub filter on child", - Request: `query { - Book(filter: {author: {verified: {_eq: true}}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {author: {verified: {_eq: true}}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func 
TestQueryOneToOneWithFilterThroughChildBackToParent(t *testing.T) { - test := testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation query with filter on parent referencing parent through child", - Request: `query { - Book(filter: {author: {published: {rating: {_eq: 4.9}}}}) { - name - rating - author { - name - age - } - } - }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, // bae-d432bdfb-787d-5a1c-ac29-dc025ab80095 - `{ + Doc: `{ "name": "Theif Lord", "rating": 4.8 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, // bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04 - `{ + Doc: `{ "name": "Cornelia Funke", "age": 62, "verified": false, "published_id": "bae-d432bdfb-787d-5a1c-ac29-dc025ab80095" }`, }, - }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - "age": uint64(65), + testUtils.Request{ + Request: `query { + Book(filter: {author: {published: {rating: {_eq: 4.9}}}}) { + name + rating + author { + name + age + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + "age": uint64(65), + }, + }, }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } func TestQueryOneToOneWithBooleanFilterOnChildWithNoSubTypeSelection(t *testing.T) { - test := 
testUtils.RequestTestCase{ + test := testUtils.TestCase{ Description: "One-to-one relation with simple sub filter on child, but not child selections", - Request: `query { + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.Request{ + Request: `query { Book(filter: {author: {verified: {_eq: true}}}) { name rating } }`, - Docs: map[int][]string{ - //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d - `{ + Results: []map[string]any{{ + "name": "Painted House", + "rating": 4.9, + }}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithCompoundAndFilterThatIncludesRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _and filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ "name": "Painted House", "rating": 4.9 }`, }, - //authors - 1: { // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 - `{ + testUtils.CreateDoc{ + CollectionID: 0, + // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-500a9445-bd90-580e-9191-d2d0ec1a5cf5 + Doc: `{ + "name": "Some Other Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ "name": "John Grisham", "age": 65, "verified": true, "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, }, + testUtils.CreateDoc{ + 
CollectionID: 1, + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Other Writer", + "age": 30, + "verified": true, + "published_id": "bae-500a9445-bd90-580e-9191-d2d0ec1a5cf5" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_and: [ + {rating: {_ge: 4.0}}, + {author: {verified: {_eq: true}}} + ]}) { + name + rating + } + }`, + Results: []map[string]any{{ + "name": "Painted House", + "rating": 4.9, + }}, + }, }, - Results: []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation with _or filter that includes relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: bookAuthorGQLSchema, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + Doc: `{ + "name": "Painted House", + "rating": 4.9 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + Doc: `{ + "name": "Some Book", + "rating": 4.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-1c890922-ddf9-5820-a888-c7f977848934 + Doc: `{ + "name": "Some Other Book", + "rating": 3.5 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-e8642720-08cb-5f5b-a8d6-7187c444a78d + Doc: `{ + "name": "Yet Another Book", + "rating": 3.0 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-41598f0c-19bc-5da6-813b-e80f14a10df3 + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "published_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Writer", + "age": 45, + "verified": false, + "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + 
}`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Some Other Writer", + "age": 35, + "verified": false, + "published_id": "bae-1c890922-ddf9-5820-a888-c7f977848934" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Yet Another Writer", + "age": 30, + "verified": false, + "published_id": "bae-e8642720-08cb-5f5b-a8d6-7187c444a78d" + }`, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {_and: [ + {rating: {_ge: 4.0}}, + {author: {age: {_le: 45}}} + ]}, + {_and: [ + {rating: {_le: 3.5}}, + {author: {age: {_ge: 35}}} + ]} + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Some Other Book", + }, + { + "name": "Some Book", + }, + }, + }, + testUtils.Request{ + Request: `query { + Book(filter: {_or: [ + {_not: {author: {age: {_lt: 65}}} }, + {_not: {author: {age: {_gt: 30}}} } + ]}) { + name + } + }`, + Results: []map[string]any{ + { + "name": "Yet Another Book", + }, + { + "name": "Painted House", + }, + }, }, }, } - executeTestCase(t, test) + testUtils.ExecuteTestCase(t, test) } diff --git a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go index cbe75c9318..eb53ab5d47 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go @@ -26,7 +26,7 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { name: String author: Author @primary } - + type Author { name: String published: Book @@ -106,9 +106,233 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -// This test documents unwanted behaviour, see: -// https://github.com/sourcenetwork/defradb/issues/1654 -func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondary(t *testing.T) { +func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithoutInnerGroup(t *testing.T) { + test := 
testUtils.TestCase{ + Description: "One-to-one relation query with group by related id alias (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author]) { + author_id + } + }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithoutInnerGroupWithJoin(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation query with group by related id alias (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + 
CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author]) { + author_id + author { + name + } + } + }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "author": map[string]any{ + "name": "Andrew Lone", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroup(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation query with group by related id alias (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author]) { + author_id + _group { + name + } + } 
+ }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + "_group": []map[string]any{ + { + "name": "Painted House", + }, + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "_group": []map[string]any{ + { + "name": "Go Guide for Rust developers", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroupWithJoin(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-one relation query with group by related id alias (secondary side)", Actions: []any{ @@ -169,14 +393,22 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondary(t *testing.T) { }`, Results: []map[string]any{ { - "author_id": nil, + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", "author": map[string]any{ - "name": "Andrew Lone", + "name": "John Grisham", }, "_group": []map[string]any{ { "name": "Painted House", }, + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "author": map[string]any{ + "name": "Andrew Lone", + }, + "_group": []map[string]any{ { "name": "Go Guide for Rust developers", }, diff --git a/tests/integration/query/one_to_one/with_group_related_id_test.go b/tests/integration/query/one_to_one/with_group_related_id_test.go index 50a1111475..45b432192d 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_test.go @@ -97,9 +97,7 @@ func TestQueryOneToOneWithGroupRelatedID(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -// This test documents unwanted behaviour, see: -// https://github.com/sourcenetwork/defradb/issues/1654 -func TestQueryOneToOneWithGroupRelatedIDFromSecondary(t *testing.T) { +func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithoutGroup(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-one relation query with group by related id (secondary side)", Actions: []any{ 
@@ -150,6 +148,235 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondary(t *testing.T) { Request: `query { Book(groupBy: [author_id]) { author_id + } + }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithoutGroupWithJoin(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation query with group by related id (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author_id]) { + author_id + author { + name + } + } + }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "author": map[string]any{ + "name": "Andrew Lone", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroup(t *testing.T) { + test 
:= testUtils.TestCase{ + Description: "One-to-one relation query with group by related id (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author_id]) { + author_id + _group { + name + } + } + }`, + Results: []map[string]any{ + { + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + "_group": []map[string]any{ + { + "name": "Painted House", + }, + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "_group": []map[string]any{ + { + "name": "Go Guide for Rust developers", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroupWithJoin(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-one relation query with group by related id (secondary side)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // bae-3d236f89-6a31-5add-a36a-27971a2eac76 + Doc: `{ + "name": "Painted House" + }`, + }, + 
testUtils.CreateDoc{ + CollectionID: 0, + // bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6 + Doc: `{ + "name": "Go Guide for Rust developers" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-6b624301-3d0a-5336-bd2c-ca00bca3de85 + Doc: `{ + "name": "John Grisham", + "published_id": "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c + Doc: `{ + "name": "Andrew Lone", + "published_id": "bae-d6627fea-8bf7-511c-bcf9-bac4212bddd6" + }`, + }, + testUtils.Request{ + Request: `query { + Book(groupBy: [author_id]) { + author_id + author { + name + } _group { name } @@ -157,11 +384,22 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondary(t *testing.T) { }`, Results: []map[string]any{ { - "author_id": nil, + "author_id": "bae-6b624301-3d0a-5336-bd2c-ca00bca3de85", + "author": map[string]any{ + "name": "John Grisham", + }, "_group": []map[string]any{ { "name": "Painted House", }, + }, + }, + { + "author_id": "bae-92fa9dcb-c1ee-5b84-b2f6-e9437c7f261c", + "author": map[string]any{ + "name": "Andrew Lone", + }, + "_group": []map[string]any{ { "name": "Go Guide for Rust developers", }, diff --git a/tests/integration/query/one_to_one_multiple/simple_test.go b/tests/integration/query/one_to_one_multiple/simple_test.go new file mode 100644 index 0000000000..cc5a97c117 --- /dev/null +++ b/tests/integration/query/one_to_one_multiple/simple_test.go @@ -0,0 +1,335 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_one_multiple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToOneMultiple_FromPrimary(t *testing.T) { + test := testUtils.TestCase{ + Description: "Multiple one-to-one joins from primary direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Publisher { + name: String + printed: Book + } + + type Author { + name: String + published: Book + } + + type Book { + name: String + publisher: Publisher @primary + author: Author @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d" + Doc: `{ + "name": "Old Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5" + Doc: `{ + "name": "New Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3 + Doc: `{ + "name": "Cornelia Funke" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Painted House", + "publisher_id": "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d", + "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Theif Lord", + "publisher_id": "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5", + "author_id": "bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3" + }`, + }, + testUtils.Request{ + Request: `query { + Book { + name + publisher { + name + } + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "publisher": map[string]any{ + "name": "Old Publisher", + }, + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "name": "Theif Lord", + "publisher": map[string]any{ + "name": "New Publisher", + }, + "author": map[string]any{ + "name": "Cornelia Funke", + }, + }, + }, + }, + }, + } + + 
testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOneMultiple_FromMixedPrimaryAndSecondary(t *testing.T) { + test := testUtils.TestCase{ + Description: "Multiple one-to-one joins from primary direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Publisher { + name: String + printed: Book @primary + } + + type Author { + name: String + published: Book + } + + type Book { + name: String + publisher: Publisher + author: Author @primary + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d" + Doc: `{ + "name": "Old Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5" + Doc: `{ + "name": "New Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3 + Doc: `{ + "name": "Cornelia Funke" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Painted House", + "publisher_id": "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d", + "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Theif Lord", + "publisher_id": "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5", + "author_id": "bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3" + }`, + }, + testUtils.Request{ + Request: `query { + Book { + name + publisher { + name + } + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "publisher": map[string]any{ + "name": "Old Publisher", + }, + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "name": "Theif Lord", + "publisher": map[string]any{ + "name": "New Publisher", + }, + "author": map[string]any{ + "name": "Cornelia Funke", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func 
TestQueryOneToOneMultiple_FromSecondary(t *testing.T) { + test := testUtils.TestCase{ + Description: "Multiple one-to-one joins from primary direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Publisher { + name: String + printed: Book @primary + } + + type Author { + name: String + published: Book @primary + } + + type Book { + name: String + publisher: Publisher + author: Author + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d" + Doc: `{ + "name": "Old Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + // "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5" + Doc: `{ + "name": "New Publisher" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + // bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3 + Doc: `{ + "name": "Cornelia Funke" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Painted House", + "publisher_id": "bae-1f4cc394-08a8-5825-87b9-b02de2f25f7d", + "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 2, + Doc: `{ + "name": "Theif Lord", + "publisher_id": "bae-a3cd6fac-13c0-5c8f-970b-0ce7abbb49a5", + "author_id": "bae-b6ea52b8-a5a5-5127-b9c0-5df4243457a3" + }`, + }, + testUtils.Request{ + Request: `query { + Book { + name + publisher { + name + } + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "publisher": map[string]any{ + "name": "Old Publisher", + }, + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "name": "Theif Lord", + "publisher": map[string]any{ + "name": "New Publisher", + }, + "author": map[string]any{ + "name": "Cornelia Funke", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_deleted_field_test.go 
b/tests/integration/query/simple/with_deleted_field_test.go new file mode 100644 index 0000000000..182cce3280 --- /dev/null +++ b/tests/integration/query/simple/with_deleted_field_test.go @@ -0,0 +1,67 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithDeletedField(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy" + }`, + }, + testUtils.DeleteDoc{ + DocID: 0, + }, + testUtils.DeleteDoc{ + DocID: 1, + }, + testUtils.Request{ + Request: `query { + User(showDeleted: true) { + _deleted + name + } + }`, + Results: []map[string]any{ + { + "_deleted": true, + "name": "Andy", + }, + { + "_deleted": true, + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_not_test.go b/tests/integration/query/simple/with_filter/with_not_test.go index 8ec86c15dd..3b5832bcdb 100644 --- a/tests/integration/query/simple/with_filter/with_not_test.go +++ b/tests/integration/query/simple/with_filter/with_not_test.go @@ -64,6 +64,46 @@ func TestQuerySimple_WithNotEqualToXFilter_NoError(t *testing.T) { executeTestCase(t, test) } +func TestQuerySimple_WithNotAndComparisonXFilter_NoError(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with _not filter with _gt condition)", + Request: `query { + 
Users(filter: {_not: {Age: {_gt: 20}}}) { + Name + Age + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "Age": 21 + }`, + `{ + "Name": "Bob", + "Age": 32 + }`, + `{ + "Name": "Carlo", + "Age": 55 + }`, + `{ + "Name": "Alice", + "Age": 19 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "Alice", + "Age": uint64(19), + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimple_WithNotEqualToXorYFilter_NoError(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with logical compound filter (not)", diff --git a/tests/integration/results.go b/tests/integration/results.go new file mode 100644 index 0000000000..052de310c5 --- /dev/null +++ b/tests/integration/results.go @@ -0,0 +1,180 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "encoding/json" + "testing" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" +) + +// AnyOf may be used as `Results` field where the value may +// be one of several values, yet the value of that field must be the same +// across all nodes due to strong eventual consistency. +type AnyOf []any + +// assertResultsAnyOf asserts that actual result is equal to at least one of the expected results. +// +// The comparison is relaxed when using client types other than goClientType. +func assertResultsAnyOf(t *testing.T, client ClientType, expected AnyOf, actual any, msgAndArgs ...any) { + switch client { + case httpClientType: + if !areResultsAnyOf(expected, actual) { + assert.Contains(t, expected, actual, msgAndArgs...) + } + default: + assert.Contains(t, expected, actual, msgAndArgs...) 
+ } +} + +// assertResultsEqual asserts that actual result is equal to the expected result. +// +// The comparison is relaxed when using client types other than goClientType. +func assertResultsEqual(t *testing.T, client ClientType, expected any, actual any, msgAndArgs ...any) { + switch client { + case httpClientType: + if !areResultsEqual(expected, actual) { + assert.EqualValues(t, expected, actual, msgAndArgs...) + } + default: + assert.EqualValues(t, expected, actual, msgAndArgs...) + } +} + +// areResultsAnyOf returns true if any of the expected results are of equal value. +// +// Values of type json.Number and immutable.Option will be reduced to their underlying types. +func areResultsAnyOf(expected AnyOf, actual any) bool { + for _, v := range expected { + if areResultsEqual(v, actual) { + return true + } + } + return false +} + +// areResultsEqual returns true if the expected and actual results are of equal value. +// +// Values of type json.Number and immutable.Option will be reduced to their underlying types. 
+func areResultsEqual(expected any, actual any) bool { + switch expectedVal := expected.(type) { + case map[string]any: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.(map[string]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for k, v := range expectedVal { + if !areResultsEqual(v, actualVal[k]) { + return false + } + } + return true + case uint64, uint32, uint16, uint8, uint, int64, int32, int16, int8, int: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Int64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case float32, float64: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Float64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case immutable.Option[float64]: + return areResultOptionsEqual(expectedVal, actual) + case immutable.Option[uint64]: + return areResultOptionsEqual(expectedVal, actual) + case immutable.Option[int64]: + return areResultOptionsEqual(expectedVal, actual) + case immutable.Option[bool]: + return areResultOptionsEqual(expectedVal, actual) + case immutable.Option[string]: + return areResultOptionsEqual(expectedVal, actual) + case []int64: + return areResultArraysEqual(expectedVal, actual) + case []uint64: + return areResultArraysEqual(expectedVal, actual) + case []float64: + return areResultArraysEqual(expectedVal, actual) + case []string: + return areResultArraysEqual(expectedVal, actual) + case []bool: + return areResultArraysEqual(expectedVal, actual) + case []any: + return areResultArraysEqual(expectedVal, actual) + case []map[string]any: + return areResultArraysEqual(expectedVal, actual) + case []immutable.Option[float64]: + 
return areResultArraysEqual(expectedVal, actual) + case []immutable.Option[uint64]: + return areResultArraysEqual(expectedVal, actual) + case []immutable.Option[int64]: + return areResultArraysEqual(expectedVal, actual) + case []immutable.Option[bool]: + return areResultArraysEqual(expectedVal, actual) + case []immutable.Option[string]: + return areResultArraysEqual(expectedVal, actual) + default: + return assert.ObjectsAreEqualValues(expected, actual) + } +} + +// areResultOptionsEqual returns true if the value of the expected immutable.Option +// and actual result are of equal value. +// +// Values of type json.Number and immutable.Option will be reduced to their underlying types. +func areResultOptionsEqual[S any](expected immutable.Option[S], actual any) bool { + var expectedVal any + if expected.HasValue() { + expectedVal = expected.Value() + } + return areResultsEqual(expectedVal, actual) +} + +// areResultArraysEqual returns true if the array of expected results and actual results +// are of equal value. +// +// Values of type json.Number and immutable.Option will be reduced to their underlying types. 
+func areResultArraysEqual[S any](expected []S, actual any) bool { + if len(expected) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expected) != len(actualVal) { + return false + } + for i, v := range expected { + if !areResultsEqual(v, actualVal[i]) { + return false + } + } + return true +} diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index d71ccbc51a..7543b3275a 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -30,6 +30,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing Schema: ` type Users { name: String + verified: Boolean } `, }, @@ -38,15 +39,15 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing NodeID: immutable.Some(1), Patch: ` [ - { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} } + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "email", "Kind": "String"} } ] `, }, testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreihn4qameldz3j7rfundmd4ldhxnaircuulk6h2vcwnpcgxl4oqffq", - DestinationSchemaVersionID: "bafkreia56p6i6o3l4jijayiqd5eiijsypjjokbldaxnmqgeav6fe576hcy", + SourceSchemaVersionID: "bafkreifmgqtwpvepenteuvj27u4ewix6nb7ypvyz6j555wsk5u2n7hrldm", + DestinationSchemaVersionID: "bafkreigfqdqnj5dunwgcsf2a6ht6q6m2yv3ys6byw5ifsmi5lfcpeh5t7e", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -99,7 +100,100 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing Results: []map[string]any{ { "name": "John", - // todo: The migration has not been run as P2P assumes it is being synced at the latest local version + // John has been migrated up to the newer schema version on node 1 + "verified": true, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + verified: Boolean + } + `, + }, + testUtils.SchemaPatch{ + // Patch node 0 only + NodeID: immutable.Some(0), + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + // Register the migration on both nodes. 
+ LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreifmgqtwpvepenteuvj27u4ewix6nb7ypvyz6j555wsk5u2n7hrldm", + DestinationSchemaVersionID: "bafkreigfqdqnj5dunwgcsf2a6ht6q6m2yv3ys6byw5ifsmi5lfcpeh5t7e", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "verified", + "value": true, + }, + }, + }, + }, + }, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "verified": true + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + // Node 0 should yield results as they were defined + NodeID: immutable.Some(0), + Request: `query { + Users { + name + verified + } + }`, + Results: []map[string]any{ + { + "name": "John", + "verified": true, + }, + }, + }, + testUtils.Request{ + // Node 1 should yield results migrated down to the old schema version. 
+ NodeID: immutable.Some(1), + Request: `query { + Users { + name + verified + } + }`, + Results: []map[string]any{ + { + "name": "John", + // John has been migrated down to the older schema version on node 1 + // clearing the verified field "verified": nil, }, }, @@ -109,3 +203,83 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ExecuteTestCase(t, test) } + +func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSchemaHistoryGap(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + // Patch node 0 only + NodeID: immutable.Some(0), + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} } + ] + `, + }, + testUtils.SchemaPatch{ + // Patch node 0 only + NodeID: immutable.Some(0), + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "email", "Kind": "String"} } + ] + `, + }, + testUtils.ConfigureMigration{ + // Register a migration from version 2 to version 3 on both nodes. + // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. 
+ LensConfig: client.LensConfig{ + SourceSchemaVersionID: "bafkreia56p6i6o3l4jijayiqd5eiijsypjjokbldaxnmqgeav6fe576hcy", + DestinationSchemaVersionID: "bafkreiadb2rps7a2zykywfxwfpgkvet5vmzaig4nvzl5sgfqquzr3qrvsq", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "verified", + "value": true, + }, + }, + }, + }, + }, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "name": "John" + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + // Node 1 should also yield the synced doc, even though there was a gap in the schema version history + Request: `query { + Users { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index 059af4d461..3c55fd7748 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -21,8 +21,6 @@ import ( "github.com/sourcenetwork/defradb/tests/lenses" ) -// todo: This test documents unwanted behaviour and should be fixed with -// https://github.com/sourcenetwork/defradb/issues/1592 func TestSchemaMigrationQueryWithTxn(t *testing.T) { test := testUtils.TestCase{ Description: "Test schema migration, with transaction", @@ -74,10 +72,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { }`, Results: []map[string]any{ { - "name": "John", - // This is the bug - although the request and migration are on the same transaction - // the migration is not picked up during the request. 
- "verified": nil, + "name": "John", + "verified": true, }, }, }, diff --git a/tests/integration/schema/migrations/with_txn_test.go b/tests/integration/schema/migrations/with_txn_test.go index f8eb5b5611..827f40de5e 100644 --- a/tests/integration/schema/migrations/with_txn_test.go +++ b/tests/integration/schema/migrations/with_txn_test.go @@ -21,8 +21,6 @@ import ( "github.com/sourcenetwork/defradb/tests/lenses" ) -// todo: This test documents unwanted behaviour and should be fixed with -// https://github.com/sourcenetwork/defradb/issues/1592 func TestSchemaMigrationGetMigrationsWithTxn(t *testing.T) { test := testUtils.TestCase{ Description: "Test schema migration, with txn", @@ -49,7 +47,23 @@ func TestSchemaMigrationGetMigrationsWithTxn(t *testing.T) { TransactionID: immutable.Some(0), // This is the bug - although the GetMigrations call and migration are on the same transaction // the migration is not returned in the results. - ExpectedResults: []client.LensConfig{}, + ExpectedResults: []client.LensConfig{ + { + SourceSchemaVersionID: "does not exist", + DestinationSchemaVersionID: "also does not exist", + Lens: model.Lens{ + Lenses: []model.LensModule{ + { + Path: lenses.SetDefaultModulePath, + Arguments: map[string]any{ + "dst": "verified", + "value": false, + }, + }, + }, + }, + }, + }, }, }, } diff --git a/tests/integration/schema/relations_test.go b/tests/integration/schema/relations_test.go index 9af43b2095..d1b420afb6 100644 --- a/tests/integration/schema/relations_test.go +++ b/tests/integration/schema/relations_test.go @@ -135,7 +135,7 @@ func TestSchemaRelationErrorsGivenOneSidedManyRelationField(t *testing.T) { dogs: [Dog] } `, - ExpectedError: "relation must be defined on both schemas. Type: Dog", + ExpectedError: "relation must be defined on both schemas. 
Field: dogs, Type: Dog", }, }, } @@ -155,7 +155,7 @@ func TestSchemaRelationErrorsGivenOneSidedRelationField(t *testing.T) { dog: Dog } `, - ExpectedError: "relation must be defined on both schemas. Type: Dog", + ExpectedError: "relation must be defined on both schemas. Field: dog, Type: Dog", }, }, } @@ -173,7 +173,7 @@ func TestSchemaRelation_GivenSelfReferemceRelationField_ReturnError(t *testing.T bestMate: Dog } `, - ExpectedError: "relation must be defined on both schemas. Type: Dog", + ExpectedError: "relation must be defined on both schemas. Field: bestMate, Type: Dog", }, }, } diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index c90fee99e0..47ef9810be 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -159,7 +159,7 @@ func TestSchemaSimpleErrorsGivenTypeWithInvalidFieldType(t *testing.T) { name: NotAType } `, - ExpectedError: "relation must be defined on both schemas. Type: NotAType", + ExpectedError: "relation must be defined on both schemas. Field: name, Type: NotAType", }, }, } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index 6d96324f3c..af852c8dd6 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -11,6 +11,7 @@ package kind import ( + "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -33,7 +34,1010 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray(t *testing.T) { { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 17} } ] `, - ExpectedError: "the adding of new relation fields is not yet supported. Field: foo, Kind: 17", + ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. 
Field: foo, Kind: 17", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_InvalidSchemaJson(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), invalid schema json", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 17, "Schema": 123} } + ] + `, + ExpectedError: "json: cannot unmarshal number into Go struct field FieldDescription.Schema.Fields.Schema of type string", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (17), missing relation type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 17, "Schema": "Users"} } + ] + `, + ExpectedError: "invalid RelationType. Field: foo, Expected: 10, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingRelationName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), missing relation name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 17, "RelationType": 10, "Schema": "Users" + }} + ] + `, + ExpectedError: "missing relation name. 
Field: foo", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingKind(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), id field missing kind", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id"} } + ] + `, + ExpectedError: "relational id field of invalid kind. Field: foo_id, Expected: ID, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidKind(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), id field invalid kind", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } + ] + `, + ExpectedError: "relational id field of invalid kind. 
Field: foo_id, Expected: ID, Actual: Boolean", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), id field missing relation type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } + ] + `, + ExpectedError: "invalid RelationType. Field: foo_id, Expected: 64, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldInvalidRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), id field invalid RelationType", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1, "RelationType": 4} } + ] + `, + ExpectedError: "invalid RelationType. 
Field: foo_id, Expected: 64, Actual: 4", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_IDFieldMissingRelationName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), id field missing relation name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1, "RelationType": 64} } + ] + `, + ExpectedError: "missing relation name. Field: foo_id", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_OnlyHalfRelationDefined(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), only half relation defined", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + ExpectedError: "relation must be defined on both schemas. 
Field: foo, Type: Users", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_NoPrimaryDefined(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), no primary defined", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 9, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 17, "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "primary side of relation not defined. RelationName: foo", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryDefinedOnManySide(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), no primary defined", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 9, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 17, "RelationType": 138, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "cannot set the many side of a relation as primary. 
Field: foobar", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_RelatedKindMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), related kind mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "invalid Kind of the related field. RelationName: foo, Expected: 17, Actual: 16", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_RelatedKindAndRelationTypeMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), related kind mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 9, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "invalid Kind of the related field. 
RelationName: foo, Expected: 17, Actual: 16", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_RelatedRelationTypeMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), related relation type mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "invalid Kind of the related field. 
RelationName: foo, Expected: 17, Actual: 16", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), valid, functional", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 17, "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(data: "{\"name\": \"John\"}") { + _key + } + }`, + Results: []map[string]any{ + { + "_key": key1, + }, + }, + }, + testUtils.Request{ + Request: fmt.Sprintf(`mutation { + create_Users(data: "{\"name\": \"Keenan\", \"foo\": \"%s\"}") { + name + foo { + name + } + } + }`, + key1, + ), + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_SinglePrimaryObjectKindSubstitution(t *testing.T) { + key1 := 
"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 17, "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSubstitution(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with single object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 137, 
"Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitution(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + 
testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + 
}, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_PrimaryObjectKindAndSchemaMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Dog { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "Schema": "Dog", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "field Kind does not match field Schema. 
Kind: Users, Schema: Dog", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_SecondaryObjectKindAndSchemaMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with Kind and Schema mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Dog { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "Schema": "Dog", "RelationName": "foo" + }} + ] + `, + ExpectedError: "field Kind does not match field Schema. 
Kind: [Users], Schema: Dog", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 137, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo_id + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo_id": key1, + "foo": map[string]any{ + "name": "John", + }, + "foobar": []map[string]any{}, + }, + { + "name": "John", + "foo": nil, + "foo_id": nil, + "foobar": []map[string]any{ + { + "name": "Keenan", + }, + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObjectArray_MissingPrimaryIDField_DoesNotCreateIdOnManySide(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object array (17), with auto id field generation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 
137, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "[Users]", "RelationType": 10, "RelationName": "foo" + }} + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + foobar_id + } + }`, + ExpectedError: `Cannot query field "foobar_id" on type "Users"`, }, }, } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 76dd134982..e09aa4dfac 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -11,6 +11,7 @@ package kind import ( + "fmt" "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" @@ -33,9 +34,1019 @@ func TestSchemaUpdatesAddFieldKindForeignObject(t *testing.T) { { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 16} } ] `, - ExpectedError: "the adding of new relation fields is not yet supported. Field: foo, Kind: 16", + ExpectedError: "a `Schema` [name] must be provided when adding a new relation field. 
Field: foo, Kind: 16", }, }, } testUtils.ExecuteTestCase(t, test) } + +func TestSchemaUpdatesAddFieldKindForeignObject_InvalidSchemaJson(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), invalid schema json", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 16, "Schema": 123} } + ] + `, + ExpectedError: "json: cannot unmarshal number into Go struct field FieldDescription.Schema.Fields.Schema of type string", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), missing relation type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo", "Kind": 16, "Schema": "Users"} } + ] + `, + ExpectedError: "invalid RelationType. Field: foo, Expected: 1 and 4 or 8, with optionally 128, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_UnknownSchema(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), unknown schema", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 5, "Schema": "Unknown" + }} + ] + `, + ExpectedError: "no schema found for given name. 
Field: foo, Schema: Unknown", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_MissingRelationName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), missing relation name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 5, "Schema": "Users" + }} + ] + `, + ExpectedError: "missing relation name. Field: foo", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingKind(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), id field missing kind", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id"} } + ] + `, + ExpectedError: "relational id field of invalid kind. 
Field: foo_id, Expected: ID, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidKind(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), id field invalid kind", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 2} } + ] + `, + ExpectedError: "relational id field of invalid kind. Field: foo_id, Expected: ID, Actual: Boolean", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), id field missing relation type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1} } + ] + `, + ExpectedError: "invalid RelationType. 
Field: foo_id, Expected: 64, Actual: 0", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldInvalidRelationType(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), id field invalid RelationType", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1, "RelationType": 4} } + ] + `, + ExpectedError: "invalid RelationType. Field: foo_id, Expected: 64, Actual: 4", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_IDFieldMissingRelationName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), id field missing relation name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "foo_id", "Kind": 1, "RelationType": 64} } + ] + `, + ExpectedError: "missing relation name. 
Field: foo_id", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_OnlyHalfRelationDefined(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), only half relation defined", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + ExpectedError: "relation must be defined on both schemas. Field: foo, Type: Users", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_NoPrimaryDefined(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), no primary defined", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "primary side of relation not defined. 
RelationName: foo", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_BothSidesPrimary(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), both sides primary", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "both sides of a relation cannot be primary. 
RelationName: foo", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_RelatedKindMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), related kind mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 17, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "invalid Kind of the related field. RelationName: foo, Expected: 16, Actual: 17", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_RelatedRelationTypeMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), related relation type mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 9, "Schema": "Users", "RelationName": "foo" + }} + ] + `, + ExpectedError: "invalid RelationType of the related field. 
RelationName: foo, Expected: 4, Actual: 9", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), valid, functional", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(data: "{\"name\": \"John\"}") { + _key + } + }`, + Results: []map[string]any{ + { + "_key": key1, + }, + }, + }, + testUtils.Request{ + Request: fmt.Sprintf(`mutation { + create_Users(data: "{\"name\": \"Keenan\", \"foo\": \"%s\"}") { + name + foo { + name + } + } + }`, + key1, + ), + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func 
TestSchemaUpdatesAddFieldKindForeignObject_SinglePrimaryObjectKindSubstitution(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with single object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": 16, "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_SingleSecondaryObjectKindSubstitution(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with single object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + 
Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": 16, "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitution(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 
64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "Schema": "Users", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindSubstitutionWithAutoSchemaValues(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with object Kind substitution", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + 
CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_ObjectKindAndSchemaMismatch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with Kind and Schema mismatch", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaUpdate{ + Schema: ` + type Dog { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "Schema": "Dog", "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + ExpectedError: "field Kind does not match field Schema. 
Kind: Users, Schema: Dog", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_MissingPrimaryIDField(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with auto primary ID field creation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindForeignObject_MissingSecondaryIDField(t *testing.T) { + key1 := "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad" + + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind foreign object (16), with auto secondary ID field creation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", 
"path": "/Users/Schema/Fields/-", "value": { + "Name": "foo", "Kind": "Users", "RelationType": 133, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foo_id", "Kind": 1, "RelationType": 64, "RelationName": "foo" + }}, + { "op": "add", "path": "/Users/Schema/Fields/-", "value": { + "Name": "foobar", "Kind": "Users", "RelationType": 5, "RelationName": "foo" + }} + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf(`{ + "name": "Keenan", + "foo": "%s" + }`, + key1, + ), + }, + testUtils.Request{ + Request: `query { + Users { + name + foo { + name + } + foobar { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Keenan", + "foo": map[string]any{ + "name": "John", + }, + "foobar": nil, + }, + { + "name": "John", + "foo": nil, + "foobar": map[string]any{ + "name": "Keenan", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/with_index_sub_test.go b/tests/integration/schema/updates/add/field/with_index_sub_test.go new file mode 100644 index 0000000000..eb4dc3d9c0 --- /dev/null +++ b/tests/integration/schema/updates/add/field/with_index_sub_test.go @@ -0,0 +1,138 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package field + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaUpdatesAddFieldSimple_FieldIndexedByName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field, index by name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/email", "value": {"Kind": 11} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + email + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldSimple_FieldIndexedByNameWithSameNameDefinedInValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field, index by name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/email", "value": {"Name": "email","Kind": 11} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + email + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldSimple_FieldIndexedByNameWithDifferentNameDefinedInValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field, index by name", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/email", "value": {"Name": "different field name","Kind": 11} } + ] + `, + ExpectedError: "the index used does not match the given name", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldSimple_FieldIndexedByNameMultipleTimes(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema 
update, add field, index by name, and test-op via name-index", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Schema/Fields/email", "value": {"Kind": 11} }, + { "op": "test", "path": "/Users/Schema/Fields/email/Kind", "value": 11 } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + email + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/test/field/simple_test.go b/tests/integration/schema/updates/test/field/simple_test.go index 38f27f0d0e..24532a8718 100644 --- a/tests/integration/schema/updates/test/field/simple_test.go +++ b/tests/integration/schema/updates/test/field/simple_test.go @@ -101,10 +101,59 @@ func TestSchemaUpdatesTestFieldPasses(t *testing.T) { testUtils.SchemaPatch{ Patch: ` [ - { "op": "test", "path": "/Users/Schema/Fields/1", "value": {"ID":1, "Name": "name", "Kind": 11} } + { "op": "test", "path": "/Users/Schema/Fields/1", "value": { + "ID":1, "Name": "name", "Kind": 11, "Schema":"","RelationName":"","Typ":1,"RelationType":0 + } } + ] + `, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesTestFieldPasses_UsingFieldNameAsIndex(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, test field passes", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "test", "path": "/Users/Schema/Fields/name", "value": { + "ID":1, "Kind": 11, "Schema":"","RelationName":"","Typ":1,"RelationType":0 + } } + ] + `, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesTestFieldPasses_TargettingKindUsingFieldNameAsIndex(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, test field passes", + Actions: []any{ + 
testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "test", "path": "/Users/Schema/Fields/name/Kind", "value": 11 } ] `, - ExpectedError: "testing value /Users/Schema/Fields/1 failed: test failed", }, }, } diff --git a/tests/integration/schema/updates/test/simple_test.go b/tests/integration/schema/updates/test/simple_test.go index e18e008490..6052283e39 100644 --- a/tests/integration/schema/updates/test/simple_test.go +++ b/tests/integration/schema/updates/test/simple_test.go @@ -62,60 +62,3 @@ func TestSchemaUpdatesTestCollectionNamePasses(t *testing.T) { } testUtils.ExecuteTestCase(t, test) } - -/* WIP -func TestSchemaUpdatesTestCollectionNameDoesNotChangeVersionID(t *testing.T) { - schemaVersionID := "bafkreicg3xcpjlt3ecguykpcjrdx5ogi4n7cq2fultyr6vippqdxnrny3u" - - test := testUtils.TestCase{ - Description: "Test schema update, test collection name does not change version ID", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "test", "path": "/Users/Name", "value": "Users" } - ] - `, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{ - "name": "Johnnn" - }`, - }, - testUtils.Request{ - Request: `query { - commits (field: "C") { - schemaVersionId - } - }`, - Results: []map[string]any{ - { - // Update commit - "schemaVersionId": schemaVersionID, - }, - { - // Create commit - "schemaVersionId": schemaVersionID, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} -*/ diff --git a/tests/integration/state.go b/tests/integration/state.go index f7d4dd45a0..69bd65e2b5 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -33,6 +33,9 @@ type state struct { // The type of database currently being tested. dbt DatabaseType + // The type of client currently being tested. 
+ clientType ClientType + // Any explicit transactions active in this test. // // This is order dependent and the property is accessed by index. @@ -83,6 +86,7 @@ func newState( t *testing.T, testCase TestCase, dbt DatabaseType, + clientType ClientType, collectionNames []string, ) *state { return &state{ @@ -90,6 +94,7 @@ func newState( t: t, testCase: testCase, dbt: dbt, + clientType: clientType, txns: []datastore.Txn{}, allActionsDone: make(chan struct{}), subscriptionResultsChans: []chan func(){}, diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 38624d42e8..e17adfdeaa 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -26,6 +26,13 @@ type TestCase struct { // this test should execute. They will execute in the order that they // are provided. Actions []any + + // If provided a value, SupportedMutationTypes will cause this test to be skipped + // if the active mutation type is not within the given set. + // + // This is to only be used in the very rare cases where we really do want behavioural + // differences between mutation types, or we need to temporarily document a bug. + SupportedMutationTypes immutable.Option[[]MutationType] } // SetupComplete is a flag to explicitly notify the change detector at which point @@ -79,7 +86,7 @@ type SchemaPatch struct { } // CreateDoc will attempt to create the given document in the given collection -// using the collection api. +// using the set [MutationType]. type CreateDoc struct { // NodeID may hold the ID (index) of a node to apply this create to. // @@ -125,8 +132,7 @@ type DeleteDoc struct { DontSync bool } -// UpdateDoc will attempt to update the given document in the given collection -// using the collection api. +// UpdateDoc will attempt to update the given document using the set [MutationType]. type UpdateDoc struct { // NodeID may hold the ID (index) of a node to apply this update to. 
// diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 3f66f07d9c..622478f513 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -12,30 +12,35 @@ package tests import ( "context" + "encoding/json" "fmt" "os" "path" "reflect" + "strconv" "strings" "testing" "time" - badger "github.com/dgraph-io/badger/v3" + badger "github.com/dgraph-io/badger/v4" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v3" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" ) const ( + clientGoEnvName = "DEFRA_CLIENT_GO" + clientHttpEnvName = "DEFRA_CLIENT_HTTP" memoryBadgerEnvName = "DEFRA_BADGER_MEMORY" fileBadgerEnvName = "DEFRA_BADGER_FILE" fileBadgerPathEnvName = "DEFRA_BADGER_FILE_PATH" @@ -45,6 +50,7 @@ const ( detectDbChangesEnvName = "DEFRA_DETECT_DATABASE_CHANGES" repositoryEnvName = "DEFRA_CODE_REPOSITORY" targetBranchEnvName = "DEFRA_TARGET_BRANCH" + mutationTypeEnvName = "DEFRA_MUTATION_TYPE" documentationDirectoryName = "data_format_changes" ) @@ -56,11 +62,52 @@ const ( badgerFileType DatabaseType = "badger-file-system" ) +type ClientType string + +const ( + // goClientType enables running the test suite using + // the go implementation of the client.DB interface. + goClientType ClientType = "go" + // httpClientType enables running the test suite using + // the http implementation of the client.DB interface. + httpClientType ClientType = "http" +) + +// The MutationType that tests will run using. 
+// +// For example if set to [CollectionSaveMutationType], all supporting +// actions (such as [UpdateDoc]) will execute via [Collection.Save]. +// +// Defaults to CollectionSaveMutationType. +type MutationType string + +const ( + // CollectionSaveMutationType will cause all supporting actions + // to run their mutations via [Collection.Save]. + CollectionSaveMutationType MutationType = "collection-save" + + // CollectionNamedMutationType will cause all supporting actions + // to run their mutations via their corresponding named [Collection] + // call. + // + // For example, CreateDoc will call [Collection.Create], and + // UpdateDoc will call [Collection.Update]. + CollectionNamedMutationType MutationType = "collection-named" + + // GQLRequestMutationType will cause all supporting actions to + // run their mutations using GQL requests, typically these will + // include a `id` parameter to target the specified document. + GQLRequestMutationType MutationType = "gql" +) + var ( log = logging.MustNewLogger("tests.integration") badgerInMemory bool badgerFile bool inMemoryStore bool + httpClient bool + goClient bool + mutationType MutationType ) const subscriptionTimeout = 1 * time.Second @@ -101,35 +148,46 @@ var previousTestCaseTestName string func init() { // We use environment variables instead of flags `go test ./...` throws for all packages // that don't have the flag defined - badgerFileValue, _ := os.LookupEnv(fileBadgerEnvName) - badgerInMemoryValue, _ := os.LookupEnv(memoryBadgerEnvName) - databaseDir, _ = os.LookupEnv(fileBadgerPathEnvName) - rootDatabaseDir, _ = os.LookupEnv(rootDBFilePathEnvName) - detectDbChangesValue, _ := os.LookupEnv(detectDbChangesEnvName) - inMemoryStoreValue, _ := os.LookupEnv(inMemoryEnvName) - repositoryValue, repositorySpecified := os.LookupEnv(repositoryEnvName) - setupOnlyValue, _ := os.LookupEnv(setupOnlyEnvName) - targetBranchValue, targetBranchSpecified := os.LookupEnv(targetBranchEnvName) - - badgerFile = 
getBool(badgerFileValue) - badgerInMemory = getBool(badgerInMemoryValue) - inMemoryStore = getBool(inMemoryStoreValue) - DetectDbChanges = getBool(detectDbChangesValue) - SetupOnly = getBool(setupOnlyValue) - - if !repositorySpecified { + httpClient, _ = strconv.ParseBool(os.Getenv(clientHttpEnvName)) + goClient, _ = strconv.ParseBool(os.Getenv(clientGoEnvName)) + badgerFile, _ = strconv.ParseBool(os.Getenv(fileBadgerEnvName)) + badgerInMemory, _ = strconv.ParseBool(os.Getenv(memoryBadgerEnvName)) + inMemoryStore, _ = strconv.ParseBool(os.Getenv(inMemoryEnvName)) + DetectDbChanges, _ = strconv.ParseBool(os.Getenv(detectDbChangesEnvName)) + SetupOnly, _ = strconv.ParseBool(os.Getenv(setupOnlyEnvName)) + + var repositoryValue string + if value, ok := os.LookupEnv(repositoryEnvName); ok { + repositoryValue = value + } else { repositoryValue = "https://github.com/sourcenetwork/defradb.git" } - if !targetBranchSpecified { + var targetBranchValue string + if value, ok := os.LookupEnv(targetBranchEnvName); ok { + targetBranchValue = value + } else { targetBranchValue = "develop" } - // default is to run against all + if value, ok := os.LookupEnv(mutationTypeEnvName); ok { + mutationType = MutationType(value) + } else { + // Default to testing mutations via Collection.Save - it should be simpler and + // faster. We assume this is desirable when not explicitly testing any particular + // mutation type. + mutationType = CollectionSaveMutationType + } + + // Default is to test go client type. + if !goClient && !httpClient { + goClient = true + } + + // Default is to test all but filesystem db types. 
if !badgerInMemory && !badgerFile && !inMemoryStore && !DetectDbChanges { - badgerInMemory = true - // Testing against the file system is off by default badgerFile = false + badgerInMemory = true inMemoryStore = true } @@ -138,26 +196,24 @@ func init() { } } -func getBool(val string) bool { - switch strings.ToLower(val) { - case "true": - return true - default: - return false - } -} - -// AssertPanicAndSkipChangeDetection asserts that the code of function actually panics, +// AssertPanic asserts that the code inside the specified PanicTestFunc panics. // -// also ensures the change detection is skipped so no false fails happen. +// This function is not supported by either the change detector, or the http-client. +// Calling this within either of them will result in the test being skipped. // -// Usage: AssertPanicAndSkipChangeDetection(t, func() { executeTestCase(t, test) }) -func AssertPanicAndSkipChangeDetection(t *testing.T, f assert.PanicTestFunc) bool { +// Usage: AssertPanic(t, func() { executeTestCase(t, test) }) +func AssertPanic(t *testing.T, f assert.PanicTestFunc) bool { if IsDetectingDbChanges() { // The `assert.Panics` call will falsely fail if this test is executed during - // a detect changes test run - t.Skip() + // a detect changes test run. + t.Skip("Assert panic with the change detector is not currently supported.") + } + + if httpClient { + // The http-client will return an error instead of panicing at the moment. + t.Skip("Assert panic with the http client is not currently supported.") } + return assert.Panics(t, f, "expected a panic, but none found.") } @@ -217,49 +273,44 @@ func newBadgerFileDB(ctx context.Context, t testing.TB, path string) (client.DB, return db, nil } -func GetDatabaseTypes() []DatabaseType { - databases := []DatabaseType{} +// GetDatabase returns the database implementation for the current +// testing state. 
The database type and client type on the test state +// are used to select the datastore and client implementation to use. +func GetDatabase(s *state) (cdb client.DB, path string, err error) { + switch s.dbt { + case badgerIMType: + cdb, err = NewBadgerMemoryDB(s.ctx, db.WithUpdateEvents()) - if badgerInMemory { - databases = append(databases, badgerIMType) - } + case badgerFileType: + cdb, path, err = NewBadgerFileDB(s.ctx, s.t) - if badgerFile { - databases = append(databases, badgerFileType) + case defraIMType: + cdb, err = NewInMemoryDB(s.ctx) + + default: + err = fmt.Errorf("invalid database type: %v", s.dbt) } - if inMemoryStore { - databases = append(databases, defraIMType) + if err != nil { + return nil, "", err } - return databases -} + switch s.clientType { + case httpClientType: + cdb, err = http.NewWrapper(cdb) -func GetDatabase(ctx context.Context, t *testing.T, dbt DatabaseType) (client.DB, string, error) { - switch dbt { - case badgerIMType: - db, err := NewBadgerMemoryDB(ctx, db.WithUpdateEvents()) - if err != nil { - return nil, "", err - } - return db, "", nil + case goClientType: + return - case badgerFileType: - db, path, err := NewBadgerFileDB(ctx, t) - if err != nil { - return nil, "", err - } - return db, path, nil + default: + err = fmt.Errorf("invalid client type: %v", s.dbt) + } - case defraIMType: - db, err := NewInMemoryDB(ctx) - if err != nil { - return nil, "", err - } - return db, "", nil + if err != nil { + return nil, "", err } - return nil, "", nil + return } // ExecuteTestCase executes the given TestCase against the configured database @@ -277,14 +328,37 @@ func ExecuteTestCase( return } - ctx := context.Background() - dbts := GetDatabaseTypes() - // Assert that this is not empty to protect against accidental mis-configurations, + skipIfMutationTypeUnsupported(t, testCase.SupportedMutationTypes) + + var clients []ClientType + if httpClient { + clients = append(clients, httpClientType) + } + if goClient { + clients = 
append(clients, goClientType) + } + + var databases []DatabaseType + if badgerInMemory { + databases = append(databases, badgerIMType) + } + if badgerFile { + databases = append(databases, badgerFileType) + } + if inMemoryStore { + databases = append(databases, defraIMType) + } + + // Assert that these are not empty to protect against accidental mis-configurations, // otherwise an empty set would silently pass all the tests. - require.NotEmpty(t, dbts) + require.NotEmpty(t, databases) + require.NotEmpty(t, clients) - for _, dbt := range dbts { - executeTestCase(ctx, t, collectionNames, testCase, dbt) + ctx := context.Background() + for _, ct := range clients { + for _, dbt := range databases { + executeTestCase(ctx, t, collectionNames, testCase, dbt, ct) + } } } @@ -294,13 +368,14 @@ func executeTestCase( collectionNames []string, testCase TestCase, dbt DatabaseType, + clientType ClientType, ) { log.Info(ctx, testCase.Description, logging.NewKV("Database", dbt)) flattenActions(&testCase) startActionIndex, endActionIndex := getActionRange(testCase) - s := newState(ctx, t, testCase, dbt, collectionNames) + s := newState(ctx, t, testCase, dbt, clientType, collectionNames) setStartingNodes(s) // It is very important that the databases are always closed, otherwise resources will leak @@ -621,7 +696,7 @@ func setStartingNodes( // If nodes have not been explicitly configured via actions, setup a default one. if !hasExplicitNode { - db, path, err := GetDatabase(s.ctx, s.t, s.dbt) + db, path, err := GetDatabase(s) require.Nil(s.t, err) s.nodes = append(s.nodes, &net.Node{ @@ -644,7 +719,7 @@ func restartNodes( for i := len(s.nodes) - 1; i >= 0; i-- { originalPath := databaseDir databaseDir = s.dbPaths[i] - db, _, err := GetDatabase(s.ctx, s.t, s.dbt) + db, _, err := GetDatabase(s) require.Nil(s.t, err) databaseDir = originalPath @@ -762,7 +837,7 @@ func configureNode( // an in memory store. 
cfg.Datastore.Badger.Path = s.t.TempDir() - db, path, err := GetDatabase(s.ctx, s.t, s.dbt) //disable change dector, or allow it? + db, path, err := GetDatabase(s) //disable change dector, or allow it? require.NoError(s.t, err) var n *net.Node @@ -995,34 +1070,42 @@ func patchSchema( refreshIndexes(s) } -// createDoc creates a document using the collection api and caches it in the -// given documents slice. +// createDoc creates a document using the chosen [mutationType] and caches it in the +// test state object. func createDoc( s *state, action CreateDoc, ) { - // All the docs should be identical, and we only need 1 copy so taking the last - // is okay. + var mutation func(*state, CreateDoc, *net.Node, []client.Collection) (*client.Document, error) + + switch mutationType { + case CollectionSaveMutationType: + mutation = createDocViaColSave + case CollectionNamedMutationType: + mutation = createDocViaColCreate + case GQLRequestMutationType: + mutation = createDocViaGQL + default: + s.t.Fatalf("invalid mutationType: %v", mutationType) + } + + var expectedErrorRaised bool var doc *client.Document actionNodes := getNodes(action.NodeID, s.nodes) for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { - var err error - doc, err = client.NewDocFromJSON([]byte(action.Doc)) - if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) { - return - } - - err = withRetry( + err := withRetry( actionNodes, nodeID, - func() error { return collections[action.CollectionID].Save(s.ctx, doc) }, + func() error { + var err error + doc, err = mutation(s, action, actionNodes[nodeID], collections) + return err + }, ) - if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) { - return - } + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) } - assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, false) + assertExpectedErrorRaised(s.t, s.testCase.Description, 
action.ExpectedError, expectedErrorRaised) if action.CollectionID >= len(s.documents) { // Expand the slice if required, so that the document can be accessed by collection index @@ -1031,6 +1114,79 @@ func createDoc( s.documents[action.CollectionID] = append(s.documents[action.CollectionID], doc) } +func createDocViaColSave( + s *state, + action CreateDoc, + node *net.Node, + collections []client.Collection, +) (*client.Document, error) { + var err error + doc, err := client.NewDocFromJSON([]byte(action.Doc)) + if err != nil { + return nil, err + } + + return doc, collections[action.CollectionID].Save(s.ctx, doc) +} + +func createDocViaColCreate( + s *state, + action CreateDoc, + node *net.Node, + collections []client.Collection, +) (*client.Document, error) { + var err error + doc, err := client.NewDocFromJSON([]byte(action.Doc)) + if err != nil { + return nil, err + } + + return doc, collections[action.CollectionID].Create(s.ctx, doc) +} + +func createDocViaGQL( + s *state, + action CreateDoc, + node *net.Node, + collections []client.Collection, +) (*client.Document, error) { + collection := collections[action.CollectionID] + + escapedJson, err := json.Marshal(action.Doc) + require.NoError(s.t, err) + + request := fmt.Sprintf( + `mutation { + create_%s(data: %s) { + _key + } + }`, + collection.Name(), + escapedJson, + ) + + db := getStore(s, node.DB, immutable.None[int](), action.ExpectedError) + + result := db.ExecRequest(s.ctx, request) + if len(result.GQL.Errors) > 0 { + return nil, result.GQL.Errors[0] + } + + resultantDocs, ok := result.GQL.Data.([]map[string]any) + if !ok || len(resultantDocs) == 0 { + return nil, nil + } + + docKeyString := resultantDocs[0]["_key"].(string) + docKey, err := client.NewDocKeyFromString(docKeyString) + require.NoError(s.t, err) + + doc, err := collection.Get(s.ctx, docKey, false) + require.NoError(s.t, err) + + return doc, nil +} + // deleteDoc deletes a document using the collection api and caches it in the // given 
documents slice. func deleteDoc( @@ -1056,16 +1212,22 @@ func deleteDoc( assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) } -// updateDoc updates a document using the collection api. +// updateDoc updates a document using the chosen [mutationType]. func updateDoc( s *state, action UpdateDoc, ) { - doc := s.documents[action.CollectionID][action.DocID] - - err := doc.SetWithJSON([]byte(action.Doc)) - if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) { - return + var mutation func(*state, UpdateDoc, *net.Node, []client.Collection) error + + switch mutationType { + case CollectionSaveMutationType: + mutation = updateDocViaColSave + case CollectionNamedMutationType: + mutation = updateDocViaColUpdate + case GQLRequestMutationType: + mutation = updateDocViaGQL + default: + s.t.Fatalf("invalid mutationType: %v", mutationType) } var expectedErrorRaised bool @@ -1074,7 +1236,7 @@ func updateDoc( err := withRetry( actionNodes, nodeID, - func() error { return collections[action.CollectionID].Save(s.ctx, doc) }, + func() error { return mutation(s, action, actionNodes[nodeID], collections) }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) } @@ -1082,6 +1244,70 @@ func updateDoc( assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) } +func updateDocViaColSave( + s *state, + action UpdateDoc, + node *net.Node, + collections []client.Collection, +) error { + doc := s.documents[action.CollectionID][action.DocID] + + err := doc.SetWithJSON([]byte(action.Doc)) + if err != nil { + return err + } + + return collections[action.CollectionID].Save(s.ctx, doc) +} + +func updateDocViaColUpdate( + s *state, + action UpdateDoc, + node *net.Node, + collections []client.Collection, +) error { + doc := s.documents[action.CollectionID][action.DocID] + + err := doc.SetWithJSON([]byte(action.Doc)) + if err != nil { + return err + } + + 
return collections[action.CollectionID].Update(s.ctx, doc) +} + +func updateDocViaGQL( + s *state, + action UpdateDoc, + node *net.Node, + collections []client.Collection, +) error { + doc := s.documents[action.CollectionID][action.DocID] + collection := collections[action.CollectionID] + + escapedJson, err := json.Marshal(action.Doc) + require.NoError(s.t, err) + + request := fmt.Sprintf( + `mutation { + update_%s(id: "%s", data: %s) { + _key + } + }`, + collection.Name(), + doc.Key().String(), + escapedJson, + ) + + db := getStore(s, node.DB, immutable.None[int](), action.ExpectedError) + + result := db.ExecRequest(s.ctx, request) + if len(result.GQL.Errors) > 0 { + return result.GQL.Errors[0] + } + return nil +} + // createIndex creates a secondary index using the collection api. func createIndex( s *state, @@ -1294,9 +1520,7 @@ func executeRequest( anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( - s.ctx, - s.t, - s.testCase.Description, + s, &result.GQL, action.Results, action.ExpectedError, @@ -1361,9 +1585,7 @@ func executeSubscriptionRequest( // This assert should be executed from the main test routine // so that failures will be properly handled. expectedErrorRaised := assertRequestResults( - s.ctx, - s.t, - s.testCase.Description, + s, finalResult, action.Results, action.ExpectedError, @@ -1435,16 +1657,14 @@ type docFieldKey struct { } func assertRequestResults( - ctx context.Context, - t *testing.T, - description string, + s *state, result *client.GQLResult, expectedResults []map[string]any, expectedError string, nodeID int, anyOfByField map[docFieldKey][]any, ) bool { - if AssertErrors(t, description, result.Errors, expectedError) { + if AssertErrors(s.t, s.testCase.Description, result.Errors, expectedError) { return true } @@ -1455,15 +1675,9 @@ func assertRequestResults( // Note: if result.Data == nil this panics (the panic seems useful while testing). 
resultantData := result.Data.([]map[string]any) - log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) + log.Info(s.ctx, "", logging.NewKV("RequestResults", result.Data)) - // compare results - assert.Equal(t, len(expectedResults), len(resultantData), description) - if len(expectedResults) == 0 { - // Need `require` here otherwise will panic in the for loop that ranges over - // resultantData and tries to access expectedResults[0]. - require.Equal(t, expectedResults, resultantData) - } + require.Equal(s.t, len(expectedResults), len(resultantData), s.testCase.Description) for docIndex, result := range resultantData { expectedResult := expectedResults[docIndex] @@ -1472,14 +1686,20 @@ func assertRequestResults( switch r := expectedValue.(type) { case AnyOf: - assert.Contains(t, r, actualValue) + assertResultsAnyOf(s.t, s.clientType, r, actualValue) dfk := docFieldKey{docIndex, field} valueSet := anyOfByField[dfk] valueSet = append(valueSet, actualValue) anyOfByField[dfk] = valueSet default: - assert.Equal(t, expectedValue, actualValue, fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex)) + assertResultsEqual( + s.t, + s.clientType, + expectedValue, + actualValue, + fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex), + ) } } } @@ -1613,3 +1833,21 @@ func assertBackupContent(t *testing.T, expectedContent, filepath string) { string(b), ) } + +// skipIfMutationTypeUnsupported skips the current test if the given supportedMutationTypes option has value +// and the active mutation type is not contained within that value set. +func skipIfMutationTypeUnsupported(t *testing.T, supportedMutationTypes immutable.Option[[]MutationType]) { + if supportedMutationTypes.HasValue() { + var isTypeSupported bool + for _, supportedMutationType := range supportedMutationTypes.Value() { + if supportedMutationType == mutationType { + isTypeSupported = true + break + } + } + + if !isTypeSupported { + t.Skipf("test does not support given mutation type. 
Type: %s", mutationType) + } + } +} diff --git a/tests/lenses/.cargo/config.toml b/tests/lenses/.cargo/config.toml new file mode 100644 index 0000000000..a42311eb23 --- /dev/null +++ b/tests/lenses/.cargo/config.toml @@ -0,0 +1,2 @@ +[http] +multiplexing = false diff --git a/tests/lenses/rust_wasm32_copy/Cargo.toml b/tests/lenses/rust_wasm32_copy/Cargo.toml index af081daf34..77b2766d8b 100644 --- a/tests/lenses/rust_wasm32_copy/Cargo.toml +++ b/tests/lenses/rust_wasm32_copy/Cargo.toml @@ -10,6 +10,3 @@ crate-type = ["cdylib"] serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.87" lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" } - -[http] -multiplexing = false diff --git a/tests/lenses/rust_wasm32_remove/Cargo.toml b/tests/lenses/rust_wasm32_remove/Cargo.toml index 561cad7140..c3b678d76e 100644 --- a/tests/lenses/rust_wasm32_remove/Cargo.toml +++ b/tests/lenses/rust_wasm32_remove/Cargo.toml @@ -10,6 +10,3 @@ crate-type = ["cdylib"] serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.87" lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" } - -[http] -multiplexing = false diff --git a/tests/lenses/rust_wasm32_set_default/Cargo.toml b/tests/lenses/rust_wasm32_set_default/Cargo.toml index 1798d3146b..14ddd3f992 100644 --- a/tests/lenses/rust_wasm32_set_default/Cargo.toml +++ b/tests/lenses/rust_wasm32_set_default/Cargo.toml @@ -10,6 +10,3 @@ crate-type = ["cdylib"] serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.87" lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" } - -[http] -multiplexing = false diff --git a/tests/lenses/rust_wasm32_set_default/src/lib.rs b/tests/lenses/rust_wasm32_set_default/src/lib.rs index fec61e422e..3bf433ffa1 100644 --- a/tests/lenses/rust_wasm32_set_default/src/lib.rs +++ b/tests/lenses/rust_wasm32_set_default/src/lib.rs @@ -78,3 +78,33 @@ fn try_transform(ptr: *mut u8) -> Result>, Box> { let result_json = 
serde_json::to_vec(&input.clone())?; Ok(Some(result_json)) } + +#[no_mangle] +pub extern fn inverse(ptr: *mut u8) -> *mut u8 { + match try_inverse(ptr) { + Ok(o) => match o { + Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json), + None => lens_sdk::nil_ptr(), + }, + Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes()) + } +} + +fn try_inverse(ptr: *mut u8) -> Result>, Box> { + let mut input = match lens_sdk::try_from_mem::>(ptr)? { + Some(v) => v, + // Implementations of `transform` are free to handle nil however they like. In this + // implementation we chose to return nil given a nil input. + None => return Ok(None), + }; + + let params = PARAMETERS.read()? + .clone() + .ok_or(ModuleError::ParametersNotSetError)? + .clone(); + + input.remove(¶ms.dst); + + let result_json = serde_json::to_vec(&input.clone())?; + Ok(Some(result_json)) +} \ No newline at end of file diff --git a/tests/lenses/utils.go b/tests/lenses/utils.go index cfb066db81..132c7d33c4 100644 --- a/tests/lenses/utils.go +++ b/tests/lenses/utils.go @@ -21,6 +21,8 @@ import ( // - `dst` is a string and is the name of the property you wish to set // - `value` can be any valid json value and is the value that you wish the `dst` property // of all documents being transformed by this module to have. +// +// This module has an inverse, which will clear any value in the `dst` field. var SetDefaultModulePath string = getPathRelativeToProjectRoot( "/tests/lenses/rust_wasm32_set_default/target/wasm32-unknown-unknown/debug/rust_wasm32_set_default.wasm", ) diff --git a/tools/cloud/akash/README.md b/tools/cloud/akash/README.md new file mode 100644 index 0000000000..0c69ebfbdf --- /dev/null +++ b/tools/cloud/akash/README.md @@ -0,0 +1,7 @@ +# DefraDB + +DefraDB is a user-centric database that prioritizes data ownership, personal privacy, and information security. 
Its data model, powered by the convergence of [MerkleCRDTs](https://arxiv.org/pdf/2004.00107.pdf) and the content-addressability of [IPLD](https://docs.ipld.io/), enables a multi-write-master architecture. It features [DQL](https://docs.source.network/references/query-specification/query-language-overview), a query language compatible with GraphQL but providing extra convenience. By leveraging peer-to-peer networking it can be deployed nimbly in novel topologies. Access control is determined by a relationship-based DSL, supporting document or field-level policies, secured by the SourceHub network. DefraDB is a core part of the [Source technologies](https://source.network/) that enable new paradigms of decentralized data and access-control management, user-centric apps, data trustworthiness, and much more. + +Read the documentation on [docs.source.network](https://docs.source.network/). + +View the source code on [GitHub](https://github.com/sourcenetwork/defradb). diff --git a/tools/cloud/akash/deploy.yaml b/tools/cloud/akash/deploy.yaml new file mode 100644 index 0000000000..c6f7070f98 --- /dev/null +++ b/tools/cloud/akash/deploy.yaml @@ -0,0 +1,51 @@ +--- +version: "2.0" + +services: + defradb: + image: sourcenetwork/defradb:v0.6.0 + args: + - start + - --url=0.0.0.0:9181 + expose: + - port: 9161 + as: 9161 + to: + - global: true + - port: 9171 + as: 9171 + to: + - global: true + - port: 9181 + as: 80 + to: + - global: true + +profiles: + compute: + defradb: + resources: + cpu: + units: 1.0 + memory: + size: 1Gi + storage: + size: 1Gi + placement: + akash: + attributes: + host: akash + signedBy: + anyOf: + - "akash1365yvmc4s7awdyj3n2sav7xfx76adc6dnmlx63" + - "akash18qa2a2ltfyvkyj0ggj3hkvuj6twzyumuaru9s4" + pricing: + defradb: + denom: uakt + amount: 10000 + +deployment: + defradb: + akash: + profile: defradb + count: 1 \ No newline at end of file diff --git a/tools/configs/golangci.yaml b/tools/configs/golangci.yaml index 448d334596..c9d69b641e 100644 --- 
a/tools/configs/golangci.yaml +++ b/tools/configs/golangci.yaml @@ -149,7 +149,7 @@ issues: - errorlint # Exclude running header check in these paths - - path: "(net|datastore/badger/v3/compat_logger.go|datastore/badger/v3/datastore.go|connor)" + - path: "(net|datastore/badger/v4/compat_logger.go|datastore/badger/v4/datastore.go|connor)" linters: - goheader diff --git a/tools/configs/mockery.yaml b/tools/configs/mockery.yaml new file mode 100644 index 0000000000..dccbb2d8f9 --- /dev/null +++ b/tools/configs/mockery.yaml @@ -0,0 +1,50 @@ +# Mockery configuration file for DefraDB. + +with-expecter: true + +quiet: False + +keeptree: True + +disable-version-string: True + +log-level: "info" + +dry-run: False + +mockname: "{{.InterfaceName}}" + +# Makes files names lower case, snake. +filename: "{{.InterfaceNameSnake}}.go" + +outpkg: mocks + +packages: # Packages and their interfaces to generate mocks for. + github.com/ipfs/go-datastore/query: + config: + dir: "./datastore/mocks" + interfaces: + Results: + + github.com/sourcenetwork/defradb/datastore: + config: + dir: "./datastore/mocks" + interfaces: + DAGStore: + DSReaderWriter: + RootStore: + Txn: + + github.com/sourcenetwork/defradb/client: + config: + dir: "./client/mocks" + interfaces: + DB: + Collection: + + github.com/sourcenetwork/defradb/db/fetcher: + config: + dir: "./db/fetcher/mocks" + interfaces: + Fetcher: + EncodedDocument: