diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml deleted file mode 100644 index 802e3188dedc..000000000000 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: Driver Adapters, Smoke Tests -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/driver-adapter-smoke-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - driver-adapter-smoke-tests: - name: ${{ matrix.adapter }} - - strategy: - fail-fast: false - matrix: - adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql] - - runs-on: ubuntu-latest - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - # via package.json rewritten into DATABASE_URL before scripts are run - env: - JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} - JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} - # TODO: test sqld and embedded replicas - JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db - # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate - JS_LIBSQL_INT_MODE: bigint - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - 
run: pnpm prisma:${{ matrix.adapter }} - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - run: pnpm ${{ matrix.adapter }}:libquery - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! - from Npm) - run: pnpm ${{ matrix.adapter }}:client - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - - driver-adapter-smoke-tests-errors: - name: Errors - - runs-on: ubuntu-latest - - # services: - # postgres: - # image: postgres - # env: - # POSTGRES_PASSWORD: postgres - # options: >- - # --health-cmd pg_isready - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 5432:5432 - - env: - # via package.json rewritten into DATABASE_URL before scripts are run - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - name: pnpm errors - run: pnpm errors - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ 
github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 7823bed70cfb..f3a3badfb804 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -49,6 +49,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' uses: actions/setup-node@v3 @@ -76,6 +78,15 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n "$branch" ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + - run: make ${{ matrix.adapter.setup_task }} - uses: 
dtolnay/rust-toolchain@stable diff --git a/.gitignore b/.gitignore index 43e03e31867d..be185b0f7afc 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,6 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory +tsconfig.build.adapter.json diff --git a/Makefile b/Makefile index 0c3e1541e632..541738c35d95 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -44,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -80,6 +87,10 @@ dev-sqlite: dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 @@ -115,12 +126,20 @@ start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13 dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) +test-pg-postgres13: dev-pg-postgres13 test-qe-st + +test-driver-adapter-pg: test-pg-postgres13 + start-neon-postgres13: build-qe-napi build-connector-kit-js docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 
$(CONFIG_FILE) +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 @@ -255,6 +274,10 @@ start-planetscale-vitess8: build-qe-napi build-connector-kit-js dev-planetscale-vitess8: start-planetscale-vitess8 cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### @@ -262,8 +285,36 @@ dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: - cd query-engine/driver-adapters/js && pnpm i && pnpm build +build-connector-kit-js: build-driver-adapters symlink-driver-adapters + cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build + +build-driver-adapters: ensure-prisma-present + @echo "Building driver adapters..." + @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @echo "Driver adapters build completed."; + +symlink-driver-adapters: ensure-prisma-present + @echo "Creating symbolic links for driver adapters..." + @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ + if [ -d "$$dir" ]; then \ + dir_name=$$(basename "$$dir"); \ + ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ + echo "Created symbolic link for $$dir_name"; \ + fi; \ + done; + echo "Symbolic links creation completed."; + +ensure-prisma-present: + @if [ -d ../prisma ]; then \ + cd "$(realpath ../prisma)" && git fetch origin main; \ + LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \ + if [ -n "$$LOCAL_CHANGES" ]; then \ + echo "⚠️ ../prisma diverges from prisma/prisma main branch. 
Test results might diverge from those in CI ⚠️ "; \ fi \ else \ echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \ git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. validate: diff --git a/README.md b/README.md index 6fd072072757..49c7c1a8ab39 100644 --- a/README.md +++ b/README.md @@ -203,6 +203,7 @@ integration tests. - Alternatively: Load the defined environment in `./.envrc` manually in your shell. **Setup:** + There are helper `make` commands to set up a test environment for a specific database connector you want to test. The commands set up a container (if needed) and write the `.test_config` file, which is picked up by the integration @@ -234,6 +235,31 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. +### Testing driver adapters + +Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README. + +**ℹ️ Important note on developing features that require changes to both the query engine and driver adapters code** + +As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make test-qe` +will ensure you have prisma checked out in your filesystem in the same directory as prisma-engines. This is needed because the driver adapters code is symlinked in prisma-engines. +When working on a feature or bugfix spanning adapters code and query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines` respectively. 
+Locally, each time you run `DRIVER_ADAPTER=$adapter make test-qe` tests will run using the driver adapters built from the source code in the working copy of prisma/prisma. All good. + +In CI, though, we need to denote which branch of prisma/prisma we want to use for tests. In CI, there's no working copy of prisma/prisma before tests run. +The CI job clones prisma/prisma `main` branch by default, which doesn't include your local changes. To test in integration, we can tell CI to use the branch of prisma/prisma containing +the changes in adapters. To do it, you can use a simple convention in commit messages. Like this: + +``` +git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]" +``` + +GitHub Actions will then pick up the branch name and use it to clone that branch's code of prisma/prisma, and build the driver adapters code from there. + +When it's time to merge the sibling PRs, you'll need to merge the prisma/prisma PR first, so when merging the engines PR you have the code of the adapters ready in prisma/prisma `main` branch. + + ## Parallel rust-analyzer builds When rust-analzyer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer. It is possible to avoid this by setting a different build path for diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 2c849a2aa985..97d19467879a 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -64,34 +64,45 @@ On the note of docker containers: Most connectors require an endpoint to run aga If you choose to set up the databases yourself, please note that the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up user, password and database for the test user. 
+### Running + +Note that by default tests run concurrently. + +- VSCode should automatically detect tests and display `run test`. +- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. +- `cargo test` in the `query-engine-tests` crate. +- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. +- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. + #### Running tests through driver adapters -The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md). -This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases. +The query engine is able to delegate query execution to javascript through driver adapters. +This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs +drivers the code that actually communicates with the databases. See [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages) To run tests through a driver adapters, you should also configure the following environment variables: -* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. 
+* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. Example: ```shell -export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh" export DRIVER_ADAPTER=neon export DRIVER_ADAPTER_CONFIG ='{ "proxyUrl": "127.0.0.1:5488/v1" }' ```` -### Running +We have provided helpers to run the query-engine tests with driver adapters, these helpers set all the required environment +variables for you: -Note that by default tests run concurrently. +```shell +DRIVER_ADAPTER=$adapter make test-qe +``` + +Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`. -- VSCode should automatically detect tests and display `run test`. -- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. -- `cargo test` in the `query-engine-tests` crate. -- A single test can be tested with the normal cargo rust facilities from command line, e.g. 
`cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. -- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. ## Authoring tests The following is an example on how to write a new test suite, as extending or changing an existing one follows the same rules and considerations. diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index b27f27406e5c..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -159,7 +159,7 @@ impl TestConfig { /// and the workspace_root is set, then use the default external test executor. 
fn fill_defaults(&mut self) { const DEFAULT_TEST_EXECUTOR: &str = - "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"; if self .external_test_executor diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..37b61ff565c7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json new file mode 100644 index 000000000000..b63694bb4459 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -0,0 +1,40 @@ +{ + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "name": "connector-test-kit-executor", + "version": "0.0.1", + "description": "", + "main": "dist/index.mjs", + "module": "dist/index.mjs", + "private": true, + "scripts": { + "build": "tsup ./src/index.ts --format esm --dts", + "lint": "tsc -p ./tsconfig.build.json" + }, + "keywords": [], + "author": "", + "sideEffects": false, + "license": "Apache-2.0", + "dependencies": { + "@libsql/client": "0.3.5", + "@neondatabase/serverless": "^0.6.0", + "@planetscale/database": "1.11.0", + "@prisma/adapter-libsql": "../adapter-libsql", + "@prisma/adapter-neon": "../adapter-neon", + "@prisma/adapter-pg": "../adapter-pg", + "@prisma/adapter-planetscale": "../adapter-planetscale", + "@prisma/driver-adapter-utils": 
"../driver-adapter-utils", + "@types/pg": "^8.10.2", + "pg": "^8.11.3", + "undici": "^5.26.5", + "ws": "^8.14.2" + }, + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "tsx": "^3.12.7", + "typescript": "^5.1.6" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 79% rename from query-engine/driver-adapters/js/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index 9a82ffdbac63..d140be7b516c 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,197 +4,79 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - adapter-libsql: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - async-mutex: - specifier: 0.4.0 - version: 0.4.0 - devDependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - - adapter-neon: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@prisma/driver-adapter-utils': - 
specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: 1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.26.2 - version: 5.26.2 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/client': - specifier: 5.4.2 - version: 5.4.2(prisma@5.4.2) - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: 
^5.26.2 - version: 5.26.2 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.4.2 - version: 5.4.2 - tsx: - specifier: ^3.12.7 - version: 3.12.7 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.5 + version: 5.26.5 + ws: + specifier: ^8.14.2 + version: 8.14.2 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: 
sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} dependencies: esbuild: 0.18.20 source-map-support: 0.5.21 dev: true - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 dev: true /@esbuild/android-arm64@0.18.20: @@ -406,7 +288,7 @@ packages: dependencies: '@jridgewell/set-array': 1.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 + '@jridgewell/trace-mapping': 0.3.20 dev: true /@jridgewell/resolve-uri@3.1.1: @@ -423,8 +305,8 @@ packages: resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} dev: true - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 @@ -435,24 +317,27 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.28 + libsql: 0.1.34 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate + dev: false - /@libsql/darwin-arm64@0.1.28: - resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} + /@libsql/darwin-arm64@0.1.34: 
+ resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} cpu: [arm64] os: [darwin] requiresBuild: true + dev: false optional: true - /@libsql/darwin-x64@0.1.28: - resolution: {integrity: sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} cpu: [x64] os: [darwin] requiresBuild: true + dev: false optional: true /@libsql/hrana-client@0.5.5: @@ -466,59 +351,76 @@ packages: - bufferutil - encoding - utf-8-validate + dev: false /@libsql/isomorphic-fetch@0.1.10: resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} dependencies: - '@types/node-fetch': 2.6.6 + '@types/node-fetch': 2.6.7 node-fetch: 2.7.0 transitivePeerDependencies: - encoding + dev: false /@libsql/isomorphic-ws@0.1.5: resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} dependencies: - '@types/ws': 8.5.5 + '@types/ws': 8.5.8 ws: 8.14.2 transitivePeerDependencies: - bufferutil - utf-8-validate + dev: false - /@libsql/linux-arm64-gnu@0.1.28: - resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} cpu: [arm64] os: [linux] requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-gnu@0.1.28: - resolution: {integrity: 
sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/linux-x64-musl@0.1.28: - resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} cpu: [x64] os: [linux] requiresBuild: true + dev: false optional: true - /@libsql/win32-x64-msvc@0.1.28: - resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} cpu: [x64] os: [win32] requiresBuild: true + dev: false optional: true /@neon-rs/load@0.0.4: resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false /@neondatabase/serverless@0.6.0: resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} dependencies: '@types/pg': 8.6.6 + dev: false /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -544,73 +446,39 @@ packages: /@planetscale/database@1.11.0: resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} engines: {node: '>=16'} - - /@prisma/client@5.4.2(prisma@5.4.2): - resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} - engines: {node: '>=16.13'} 
- requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 - prisma: 5.4.2 dev: false - /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: - resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} - dev: false - - /@prisma/engines@5.4.2: - resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} - requiresBuild: true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true - - /@types/node-fetch@2.6.6: - resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} - dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 form-data: 4.0.0 + dev: false /@types/node@20.5.1: resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} - - /@types/node@20.6.5: - resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} /@types/pg@8.10.2: resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} dependencies: - '@types/node': 20.5.9 + 
'@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 4.0.1 + dev: false /@types/pg@8.6.6: resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} dependencies: - '@types/node': 20.5.9 + '@types/node': 20.5.1 pg-protocol: 1.6.0 pg-types: 2.2.0 + dev: false - /@types/ws@8.5.5: - resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} dependencies: - '@types/node': 20.6.5 + '@types/node': 20.5.1 + dev: false /any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -629,14 +497,9 @@ packages: engines: {node: '>=8'} dev: true - /async-mutex@0.4.0: - resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} - dependencies: - tslib: 2.6.2 - dev: false - /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false /balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} @@ -668,9 +531,10 @@ packages: /buffer-writer@2.0.0: resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} engines: {node: '>=4'} + dev: false - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: esbuild: '>=0.17' @@ 
-704,6 +568,7 @@ packages: engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 + dev: false /commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} @@ -714,21 +579,6 @@ packages: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - /cross-spawn@7.0.3: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} @@ -741,6 +591,7 @@ packages: /data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} + dev: false /debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} @@ -752,14 +603,17 @@ packages: optional: true dependencies: ms: 2.1.2 + dev: true /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dev: false /detect-libc@2.0.2: resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} engines: {node: '>=8'} + dev: false /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -836,6 
+690,7 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 3.2.1 + dev: false /fill-range@7.0.1: resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} @@ -851,12 +706,14 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + dev: false /formdata-polyfill@4.0.10: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} engines: {node: '>=12.20.0'} dependencies: fetch-blob: 3.2.0 + dev: false /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -875,8 +732,8 @@ packages: engines: {node: '>=10'} dev: true - /get-tsconfig@4.7.0: - resolution: {integrity: sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} dependencies: resolve-pkg-maps: 1.0.0 dev: true @@ -961,11 +818,6 @@ packages: engines: {node: '>=8'} dev: true - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true @@ -977,21 +829,24 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false - /libsql@0.1.28: - resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} cpu: [x64, arm64] os: 
[darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.28 - '@libsql/darwin-x64': 0.1.28 - '@libsql/linux-arm64-gnu': 0.1.28 - '@libsql/linux-x64-gnu': 0.1.28 - '@libsql/linux-x64-musl': 0.1.28 - '@libsql/win32-x64-msvc': 0.1.28 + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} @@ -1031,12 +886,14 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + dev: false /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 + dev: false /mimic-fn@2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} @@ -1051,6 +908,7 @@ packages: /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -1063,6 +921,7 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + dev: false /node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -1074,6 +933,7 @@ packages: optional: true dependencies: whatwg-url: 5.0.0 
+ dev: false /node-fetch@3.3.2: resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} @@ -1082,6 +942,7 @@ packages: data-uri-to-buffer: 4.0.1 fetch-blob: 3.2.0 formdata-polyfill: 4.0.10 + dev: false /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} @@ -1102,6 +963,7 @@ packages: /obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: false /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -1118,6 +980,7 @@ packages: /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -1137,18 +1000,22 @@ packages: /pg-cloudflare@1.1.1: resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} requiresBuild: true + dev: false optional: true /pg-connection-string@2.6.2: resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false /pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + dev: false /pg-numeric@1.0.2: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} + dev: false /pg-pool@3.6.1(pg@8.11.3): resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} @@ -1156,9 +1023,11 @@ packages: pg: '>=8.0' 
dependencies: pg: 8.11.3 + dev: false /pg-protocol@1.6.0: resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false /pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} @@ -1169,6 +1038,7 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 + dev: false /pg-types@4.0.1: resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} @@ -1181,6 +1051,7 @@ packages: postgres-date: 2.0.1 postgres-interval: 3.0.0 postgres-range: 1.1.3 + dev: false /pg@8.11.3: resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} @@ -1200,11 +1071,13 @@ packages: pgpass: 1.0.5 optionalDependencies: pg-cloudflare: 1.1.1 + dev: false /pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} dependencies: split2: 4.2.0 + dev: false /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} @@ -1229,55 +1102,56 @@ packages: optional: true dependencies: lilconfig: 2.1.0 - yaml: 2.3.2 + yaml: 2.3.3 dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} engines: {node: '>=4'} + dev: false /postgres-array@3.0.2: resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} engines: {node: '>=12'} + dev: false /postgres-bytea@1.0.0: resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} engines: {node: '>=0.10.0'} + dev: false /postgres-bytea@3.0.0: resolution: {integrity: 
sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} engines: {node: '>= 6'} dependencies: obuf: 1.1.2 + dev: false /postgres-date@1.0.7: resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} engines: {node: '>=0.10.0'} + dev: false /postgres-date@2.0.1: resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} engines: {node: '>=12'} + dev: false /postgres-interval@1.2.0: resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} engines: {node: '>=0.10.0'} dependencies: xtend: 4.0.2 + dev: false /postgres-interval@3.0.0: resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} engines: {node: '>=12'} + dev: false /postgres-range@1.1.3: resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - - /prisma@5.4.2: - resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.4.2 + dev: false /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} @@ -1309,8 +1183,8 @@ packages: engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true optionalDependencies: @@ -1366,6 +1240,7 @@ packages: /split2@4.2.0: resolution: {integrity: 
sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} + dev: false /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} @@ -1386,13 +1261,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - /thenify-all@1.6.0: resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} engines: {node: '>=0.8'} @@ -1415,6 +1283,7 @@ packages: /tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false /tr46@1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} @@ -1431,10 +1300,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - /tsup@7.2.0(typescript@5.1.6): resolution: {integrity: sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} engines: {node: '>=16.14'} @@ -1451,7 +1316,7 @@ packages: typescript: optional: true dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) + bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 chokidar: 3.5.3 debug: 4.3.4 @@ -1461,7 +1326,7 @@ packages: joycon: 3.1.1 postcss-load-config: 4.0.1 resolve-from: 5.0.0 - rollup: 3.28.1 + rollup: 3.29.4 source-map: 0.8.0-beta.0 sucrase: 3.34.0 tree-kill: 1.2.2 @@ -1475,9 +1340,9 @@ packages: resolution: {integrity: 
sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} hasBin: true dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 optionalDependencies: fsevents: 2.3.3 dev: true @@ -1488,8 +1353,8 @@ packages: hasBin: true dev: true - /undici@5.26.2: - resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: '@fastify/busboy': 2.0.0 @@ -1498,9 +1363,11 @@ packages: /web-streams-polyfill@3.2.1: resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} engines: {node: '>= 8'} + dev: false /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false /webidl-conversions@4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} @@ -1511,6 +1378,7 @@ packages: dependencies: tr46: 0.0.3 webidl-conversions: 3.0.1 + dev: false /whatwg-url@7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} @@ -1543,12 +1411,14 @@ packages: optional: true utf-8-validate: optional: true + dev: false /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + dev: false - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} + /yaml@2.3.3: + resolution: {integrity: 
sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} engines: {node: '>= 14'} dev: true diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh rename to query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from 
query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts similarity index 99% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 8a05a6b2e9aa..b89348fb3e77 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -9,7 +9,8 @@ import * as prismaPg from '@prisma/adapter-pg' // neon dependencies import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' -import { fetch, WebSocket } from 'undici' +import { fetch } from 'undici' +import { WebSocket } from 'ws' import * as prismaNeon from '@prisma/adapter-neon' // libsql dependencies diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts similarity index 92% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts index 764df8f6108d..186d7a9e80d2 100644 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -10,7 +10,7 @@ export function initQueryEngine(adapter: 
ErrorCapturingDriverAdapter, datamodel: const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) const libqueryEngine = { exports: {} as unknown as lib.Library } // @ts-ignore diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json similarity index 99% rename from query-engine/driver-adapters/js/tsconfig.json rename to query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json index b405cea50201..516c114b3e15 100644 --- a/query-engine/driver-adapters/js/tsconfig.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -20,4 +20,4 @@ "resolveJsonModule": true }, "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default -coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- 
a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml deleted file mode 100644 index f0beb50a2167..000000000000 --- a/query-engine/driver-adapters/js/.prettierrc.yml +++ /dev/null @@ -1,5 +0,0 @@ -tabWidth: 2 -trailingComma: all -singleQuote: true -semi: false -printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index 926d6db2b0a8..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Prisma Driver Adapters - - - - - - - -
- -This TypeScript monorepo contains the following packages: -- `@prisma/driver-adapter-utils` - - Internal set of utilities and types for Prisma's driver adapters. -- `@prisma/adapter-neon` - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@prisma/adapter-planetscale` - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@prisma/adapter-pg` - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -This is very important to double-check if you have multiple versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` -- Build and link TypeScript packages via `pnpm build` -- Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore deleted file mode 100644 index c370cb644f95..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md deleted file mode 100644 index 5ca415ea8ec9..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# @prisma/adapter-libsql - -Prisma driver adapter for Turso and libSQL. - -See https://prisma.io/turso for details. 
- -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. - -```sh -npm install @prisma/adapter-libsql -npm install @libsql/client -``` - -Make sure your Turso database connection string and authentication token is copied over to your `.env` file. The connection string will start with `libsql://`. - -```env -# .env -TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." -TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to setup migrations - -As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: - -1. Create a baseline migration - -```sh -npx prisma migrate diff --from-empty \ - --to-schema-datamodel prisma/schema.prisma \ - --script > baseline.sql -``` - -2. Apply the migration to your Turso database - -```sh -turso db shell turso-prisma < baseline.sql -``` - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the libSQL serverless database driver -3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver -4. 
Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { PrismaClient } from '@prisma/client'; -import { PrismaLibSQL } from '@prisma/adapter-libsql'; -import { createClient } from '@libsql/client'; - -// Setup -const connectionString = `${process.env.TURSO_DATABASE_URL}`; -const authToken = `${process.env.TURSO_AUTH_TOKEN}`; - -// Init prisma client -const libsql = createClient({ - url: connectionString, - authToken, -}); -const adapter = new PrismaLibSQL(libsql); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses a **single** remote Turso database. -You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. -We encourage you to create an issue if you find something missing or run into a bug. - -If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. 
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json deleted file mode 100644 index fbce33c98a29..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-libsql", - "version": "0.0.0", - "description": "Prisma's driver adapter for libSQL and Turso", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json", - "test": "node --loader tsx --test tests/*.test.mts" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alexey Orlenko ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "async-mutex": "0.4.0" - }, - "devDependencies": { - "@libsql/client": "0.3.5" - }, - "peerDependencies": { - "@libsql/client": "^0.3.5" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts deleted file mode 100644 index b2fa4b5b4095..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils' -import { Row, Value } from '@libsql/client' -import { isArrayBuffer } from 'node:util/types' - -const debug = Debug('prisma:driver-adapter:libsql:conversion') - -// Mirrors sqlite/conversion.rs in quaint -function mapDeclType(declType: string): ColumnType | null { - switch (declType.toUpperCase()) { - case '': - return null - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'FLOAT': - return ColumnTypeEnum.Float - case 'DOUBLE': - case 'DOUBLE PRECISION': - case 'NUMERIC': - case 'REAL': - return ColumnTypeEnum.Double - case 'TINYINT': - case 'SMALLINT': - case 
'MEDIUMINT': - case 'INT': - case 'INTEGER': - case 'SERIAL': - case 'INT2': - return ColumnTypeEnum.Int32 - case 'BIGINT': - case 'UNSIGNED BIG INT': - case 'INT8': - return ColumnTypeEnum.Int64 - case 'DATETIME': - case 'TIMESTAMP': - return ColumnTypeEnum.DateTime - case 'TIME': - return ColumnTypeEnum.Time - case 'DATE': - return ColumnTypeEnum.Date - case 'TEXT': - case 'CLOB': - case 'CHARACTER': - case 'VARCHAR': - case 'VARYING CHARACTER': - case 'NCHAR': - case 'NATIVE CHARACTER': - case 'NVARCHAR': - return ColumnTypeEnum.Text - case 'BLOB': - return ColumnTypeEnum.Bytes - case 'BOOLEAN': - return ColumnTypeEnum.Boolean - default: - debug('unknown decltype:', declType) - return null - } -} - -function mapDeclaredColumnTypes(columntTypes: string[]): [out: Array, empty: Set] { - const emptyIndices = new Set() - const result = columntTypes.map((typeName, index) => { - const mappedType = mapDeclType(typeName) - if (mappedType === null) { - emptyIndices.add(index) - } - return mappedType - }) - return [result, emptyIndices] -} - -export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] { - const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes) - - if (emptyIndices.size === 0) { - return columnTypes as ColumnType[] - } - - columnLoop: for (const columnIndex of emptyIndices) { - // No declared column type in db schema, infer using first non-null value - for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { - const candidateValue = rows[rowIndex][columnIndex] - if (candidateValue !== null) { - columnTypes[columnIndex] = inferColumnType(candidateValue) - continue columnLoop - } - } - - // No non-null value found for this column, fall back to int32 to mimic what quaint does - columnTypes[columnIndex] = ColumnTypeEnum.Int32 - } - - return columnTypes as ColumnType[] -} - -function inferColumnType(value: NonNullable): ColumnType { - switch (typeof value) { - case 'string': - return ColumnTypeEnum.Text - case 
'bigint': - return ColumnTypeEnum.Int64 - case 'boolean': - return ColumnTypeEnum.Boolean - case 'number': - return ColumnTypeEnum.UnknownNumber - case 'object': - return inferObjectType(value) - default: - throw new UnexpectedTypeError(value) - } -} - -function inferObjectType(value: {}): ColumnType { - if (isArrayBuffer(value)) { - return ColumnTypeEnum.Bytes - } - throw new UnexpectedTypeError(value) -} - -class UnexpectedTypeError extends Error { - name = 'UnexpectedTypeError' - constructor(value: unknown) { - const type = typeof value - const repr = type === 'object' ? JSON.stringify(value) : String(value) - super(`unexpected value of type ${type}: ${repr}`) - } -} - -export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { - // `Row` doesn't have map, so we copy the array once and modify it in-place - // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. - const result: unknown[] = Array.from(row) - - for (let i = 0; i < result.length; i++) { - const value = result[i] - - // Convert bigint to string as we can only use JSON-encodable types here - if (typeof value === 'bigint') { - result[i] = value.toString() - } - - // Convert array buffers to arrays of bytes. - // Base64 would've been more efficient but would collide with the existing - // logic that treats string values of type Bytes as raw UTF-8 bytes that was - // implemented for other adapters. - if (isArrayBuffer(value)) { - result[i] = Array.from(new Uint8Array(value)) - } - - // If an integer is required and the current number isn't one, - // discard the fractional part. 
- if ( - typeof value === 'number' && - (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && - !Number.isInteger(value) - ) { - result[i] = Math.trunc(value) - } - } - - return result -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts deleted file mode 100644 index 04a95cc4cfcd..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaLibSQL } from './libsql' diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts deleted file mode 100644 index 6528c8f44a8a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import type { - InStatement, - Client as LibSqlClientRaw, - Transaction as LibSqlTransactionRaw, - ResultSet as LibSqlResultSet, -} from '@libsql/client' -import { Mutex } from 'async-mutex' -import { getColumnTypes, mapRow } from './conversion' - -const debug = Debug('prisma:driver-adapter:libsql') - -type StdClient = LibSqlClientRaw -type TransactionClient = LibSqlTransactionRaw - -const LOCK_TAG = Symbol() - -class LibSqlQueryable implements Queryable { - readonly flavour = 'sqlite'; - - [LOCK_TAG] = new Mutex() - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. 
- */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - - return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { - const columnTypes = getColumnTypes(declaredColumnTypes, rows) - - return { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const release = await this[LOCK_TAG].acquire() - try { - const result = await this.client.execute(query as InStatement) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] - if (typeof rawCode === 'number') { - return err({ - kind: 'Sqlite', - extendedCode: rawCode, - message: error.message, - }) - } - throw error - } finally { - release() - } - } -} - -class LibSqlTransaction extends LibSqlQueryable implements Transaction { - finished = false - - constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - - try { - await this.client.commit() - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - - try { - await this.client.rollback() - } catch (error) { - debug('error in rollback:', error) - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.finished = true - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaLibSQL extends LibSqlQueryable implements DriverAdapter { - constructor(client: StdClient) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const release = await this[LOCK_TAG].acquire() - - try { - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options, release)) - } catch (e) { - // note: we only release the lock if creating the transaction fails, it must stay locked otherwise, - // hence `catch` and rethrowing the error and not `finally`. 
- release() - throw e - } - } - - async close(): Promise> { - await this[LOCK_TAG].acquire() - this.client.close() - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts deleted file mode 100644 index f7f1b474a300..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts +++ /dev/null @@ -1,151 +0,0 @@ -import assert from 'node:assert/strict' -import { describe, it } from 'node:test' -import { Config, createClient } from '@libsql/client' -import { PrismaLibSQL } from '../dist/index.js' -import { ColumnTypeEnum } from '@jkomyno/prisma-driver-adapter-utils' - -function connect(config?: Partial): PrismaLibSQL { - const client = createClient({ url: 'file:test.db', ...config }) - return new PrismaLibSQL(client) -} - -it('checks declared types', async () => { - const client = connect() - - await client.executeRaw({ - sql: ` - DROP TABLE IF EXISTS types; - `, - args: [], - }) - - await client.executeRaw({ - sql: ` - CREATE TABLE types ( - id INTEGER PRIMARY KEY, - real REAL, - bigint BIGINT, - date DATETIME, - text TEXT, - blob BLOB - ) - `, - args: [], - }) - - const result = await client.queryRaw({ - sql: ` - SELECT * FROM types - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ - ColumnTypeEnum.Int32, - ColumnTypeEnum.Double, - ColumnTypeEnum.Int64, - ColumnTypeEnum.DateTime, - ColumnTypeEnum.Text, - ColumnTypeEnum.Bytes, - ]) -}) - -it('infers types when sqlite decltype is not available', async () => { - const client = connect() - - const result = await client.queryRaw({ - sql: ` - SELECT 1 as first, 'test' as second - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, ColumnTypeEnum.Text]) -}) - -describe('int64 with different intMode', () => { - const N = 2n ** 63n - 1n - - it('correctly infers int64 with 
intMode=number for safe JS integers', async () => { - const client = connect({ intMode: 'number' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [Number.MAX_SAFE_INTEGER], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) - }) - - it("doesn't support very big int64 with intMode=number", async () => { - const client = connect({ intMode: 'number' }) - - assert.rejects( - client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }), - ) - }) - - it('correctly infers int64 with intMode=bigint', async () => { - const client = connect({ intMode: 'bigint' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - - // bigints are converted to strings because we can't currently pass a bigint - // to rust due to a napi.rs limitation - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it('correctly infers int64 with intMode=string when we have decltype', async () => { - const client = connect({ intMode: 'string' }) - - await client.executeRaw({ - sql: `DROP TABLE IF EXISTS test`, - args: [], - }) - - await client.executeRaw({ - sql: `CREATE TABLE test (int64 BIGINT)`, - args: [], - }) - - await client.executeRaw({ - sql: `INSERT INTO test (int64) VALUES (?)`, - args: [N], - }) - - const result = await client.queryRaw({ - sql: `SELECT int64 FROM test`, - args: [], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it("can't infer int64 with intMode=string without schema", async () => { - const client = connect({ intMode: 'string' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) - 
assert.equal(result.value.rows[0][0], N.toString()) - }) -}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index f36f44c6bca4..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-neon - -Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. - -```sh -npm install @prisma/adapter-neon -npm install @neondatabase/serverless -npm install ws -``` - -Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. 
The connection string will start with `postgres://`. - -```env -# .env -DATABASE_URL="postgres://..." -``` - -Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgresql" - url = env("DATABASE_URL") -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the Neon serverless database driver -3. Instantiate the Prisma Neon adapter with the Neon serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { Pool, neonConfig } from '@neondatabase/serverless'; -import { PrismaNeon } from '@prisma/adapter-neon'; -import { PrismaClient } from '@prisma/client'; -import ws from 'ws'; - -// Setup -neonConfig.webSocketConstructor = ws; -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const pool = new Pool({ connectionString }); -const adapter = new PrismaNeon(pool); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development. 
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 02005a13572f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@prisma/adapter-neon", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index 78f285240599..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from '@neondatabase/serverless' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). 
- * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case 
ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] 
{ - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. 
See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? 
JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? 
encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index e8fe40ada22f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_ENABLED = true - -type PerformIOResult = neon.QueryResult | neon.FullQueryResults - -/** - * Base class for http client, ws client and ws transaction - */ -abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - async executeRaw(query: Query): Promise> { - const 
tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map((r) => r.rowCount ?? 0) - } - - abstract performIO(query: Query): Promise> -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) - } catch (e) { - debug('Error in performIO: %O', e) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw e - } - } -} - -class NeonTransaction extends NeonWsQueryable implements Transaction { - finished = false - - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new NeonTransaction(connection, options)) - } - - async close() { - if (this.isRunning) { - await 
this.client.end() - this.isRunning = false - } - return ok(undefined) - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - return ok( - await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }), - ) - } - - startTransaction(): Promise> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 7514569c562a..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-pg", - "version": "0.0.0", - "description": 
"Prisma's driver adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index c26b13877927..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from 'pg' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. 
Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return 
ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - 
converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. 
- * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. - */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? 
encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts deleted file mode 100644 index f8e51ac2685b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPg } from './pg' diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts deleted file mode 100644 index c34050778c39..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ /dev/null @@ -1,138 +0,0 @@ -import type pg from 'pg' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:pg') - -type StdClient = pg.Pool -type TransactionClient = pg.PoolClient - -class PgQueryable implements Queryable { - readonly flavour = 'postgres' - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. 
- */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. 
- */ - private async performIO(query: Query): Promise>> { - const { sql, args: values } = query - - try { - const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw error - } - } -} - -class PgTransaction extends PgQueryable implements Transaction { - finished = false - - constructor(client: pg.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaPg extends PgQueryable implements DriverAdapter { - constructor(client: pg.Pool) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new PgTransaction(connection, options)) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md deleted file mode 100644 index a4cdc132036a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-planetscale - -Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). 
- -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with PlanetScale](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver. - -```sh -npm install @prisma/adapter-planetscale -npm install @planetscale/database -npm install undici -``` - -Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`. - -```env -# .env -DATABASE_URL="mysql://..." -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") - relationMode = "prisma" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the PlanetScale serverless database driver -3. Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver -4. 
Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { connect } from '@planetscale/database'; -import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; -import { PrismaClient } from '@prisma/client'; -import { fetch as undiciFetch } from 'undici'; - -// Setup -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const connection = connect({ url: connectionString, fetch: undiciFetch }); -const adapter = new PrismaPlanetScale(connection); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. 
diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index 59d59704ab50..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-planetscale", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index f6cf8563dc24..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL' - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 
'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { - switch (field) { - case 'INT8': - case 'UINT8': - case 'INT16': - case 'UINT16': - case 'INT24': - case 'UINT24': - case 'INT32': - case 'UINT32': - case 'YEAR': - return ColumnTypeEnum.Int32 - case 'INT64': - case 'UINT64': - return ColumnTypeEnum.Int64 - case 'FLOAT32': - return ColumnTypeEnum.Float - case 'FLOAT64': - return ColumnTypeEnum.Double - case 'TIMESTAMP': - case 'DATETIME': - return ColumnTypeEnum.DateTime - case 'DATE': - return ColumnTypeEnum.Date - case 'TIME': - return ColumnTypeEnum.Time - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'CHAR': - return ColumnTypeEnum.Char - case 'TEXT': - case 'VARCHAR': - return ColumnTypeEnum.Text - case 'ENUM': - return ColumnTypeEnum.Enum - case 'JSON': - return ColumnTypeEnum.Json - case 'BLOB': - case 'BINARY': - case 'VARBINARY': - case 'BIT': - case 'BITNUM': - case 'HEXNUM': - case 'HEXVAL': - case 'GEOMETRY': - return ColumnTypeEnum.Bytes - case 'NULL': - // Fall back to Int32 for consistency with quaint. 
- return ColumnTypeEnum.Int32 - default: - throw new Error(`Unsupported column type: ${field}`) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts deleted file mode 100644 index 013409c8424f..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type Deferred = { - resolve(value: T | PromiseLike): void; - reject(reason: unknown): void; -} - - -export function createDeferred(): [Deferred, Promise] { - const deferred = {} as Deferred - return [deferred, new Promise((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - })] -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts deleted file mode 100644 index 5e8add856fbb..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPlanetScale } from './planetscale' diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts deleted file mode 100644 index 5a52851112b2..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ /dev/null @@ -1,181 +0,0 @@ -import type planetScale from '@planetscale/database' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' -import { createDeferred, Deferred } from './deferred' - -const debug = Debug('prisma:driver-adapter:planetscale') - -class RollbackError extends Error { - constructor() { - super('ROLLBACK') - this.name = 
'RollbackError' - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, RollbackError) - } - } -} - -class PlanetScaleQueryable implements Queryable { - readonly flavour = 'mysql' - constructor(protected client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { - const columns = fields.map((field) => field.name) - return { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - const result = await this.client.execute(sql, values, { - as: 'array', - }) - return ok(result) - } catch (e) { - const error = e as Error - if (error.name === 'DatabaseError') { - const parsed = parseErrorMessage(error.message) - if (parsed) { - return err({ - kind: 'Mysql', - ...parsed, - }) - } - } - debug('Error in performIO: %O', error) - throw error - } - } -} - -function parseErrorMessage(message: string) { - const match = message.match( - /target: (?:.+?) vttablet: (?.+?) 
\(errno (?\d+)\) \(sqlstate (?.+?)\)/, - ) - - if (!match || !match.groups) { - return undefined - } - return { - code: Number(match.groups.code), - message: match.groups.message, - state: match.groups.state, - } -} - -class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { - finished = false - - constructor( - tx: planetScale.Transaction, - readonly options: TransactionOptions, - private txDeferred: Deferred, - private txResultPromise: Promise, - ) { - super(tx) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.txDeferred.resolve() - return Promise.resolve(ok(await this.txResultPromise)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.txDeferred.reject(new RollbackError()) - return Promise.resolve(ok(await this.txResultPromise)) - } - - dispose(): Result { - if (!this.finished) { - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { - constructor(client: planetScale.Connection) { - super(client) - } - - async startTransaction() { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - return new Promise>((resolve, reject) => { - const txResultPromise = this.client - .transaction(async (tx) => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve(ok(txWrapper)) - return deferredPromise - }) - .catch((error) => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) - }) - } - - async close() { - return ok(undefined) - } -} diff --git 
a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/js/connector-test-kit-executor/package.json deleted file mode 100644 index 2a0d16bd4ccf..000000000000 --- a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "connector-test-kit-executor", - "version": "5.4.0", - "description": "", - "main": "dist/index.js", - "private": true, - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "keywords": [], - "author": "", - "sideEffects": false, - "license": "Apache-2.0", - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", - "@types/pg": "^8.10.2", - "pg": "^8.11.3", - "undici": "^5.26.2" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 
100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 64301a7a5533..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@prisma/driver-adapter-utils", - "version": "0.0.0", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index 1e3aa36210cf..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Result, err, ok } from './result' -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord } from './types' - -class ErrorRegistryInternal implements ErrorRegistry { - private registeredErrors: ErrorRecord[] = [] - - consumeError(id: number): ErrorRecord | undefined { - return this.registeredErrors[id] - } - - registerNewError(error: unknown) { - let i = 0 - while (this.registeredErrors[i] !== undefined) { - i++ - } - this.registeredErrors[i] = { error } - return i - } -} - -// *.bind(adapter) is required to 
preserve the `this` context of functions whose -// execution is delegated to napi.rs. -export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { - const errorRegistry = new ErrorRegistryInternal() - - const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) - return { - errorRegistry, - queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), - executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), - flavour: adapter.flavour, - startTransaction: async (...args) => { - const result = await startTransaction(...args) - return result.map((tx) => bindTransaction(errorRegistry, tx)) - }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), - } -} - -// *.bind(transaction) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return { - flavour: transaction.flavour, - options: transaction.options, - queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), - executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), - commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), - rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)), - } -} - -function wrapAsync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Promise>, -): (...args: A) => Promise> { - return async (...args) => { - try { - return await fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return err({ kind: 'GenericJs', id }) - } - } -} - -function wrapSync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Result, -): (...args: A) => Result { - return (...args) => { - try { - return fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return 
err({ kind: 'GenericJs', id }) - } - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts deleted file mode 100644 index 5ddc7f20b390..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ /dev/null @@ -1,48 +0,0 @@ -// Same order as in rust driver-adapters' `ColumnType`. -// Note: exporting const enums causes lots of problems with bundlers, so we emulate -// them via regular dictionaries. -// See: https://hackmd.io/@dzearing/Sk3xV0cLs -export const ColumnTypeEnum = { - // Scalars - Int32: 0, - Int64: 1, - Float: 2, - Double: 3, - Numeric: 4, - Boolean: 5, - Char: 6, - Text: 7, - Date: 8, - Time: 9, - DateTime: 10, - Json: 11, - Enum: 12, - Bytes: 13, - Set: 14, - Uuid: 15, - - // Arrays - Int32Array: 64, - Int64Array: 65, - FloatArray: 66, - DoubleArray: 67, - NumericArray: 68, - BooleanArray: 69, - CharArray: 70, - TextArray: 71, - DateArray: 72, - TimeArray: 73, - DateTimeArray: 74, - JsonArray: 75, - EnumArray: 76, - BytesArray: 77, - UuidArray: 78, - - // Custom - UnknownNumber: 128, -} as const - -// This string value paired with `ColumnType.Json` will be treated as JSON `null` -// when convering to a quaint value. This is to work around JS/JSON null values -// already being used to represent database NULLs. 
-export const JsonNullMarker = '$__prisma_null' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts deleted file mode 100644 index e0a1fe380fa2..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { debug as Debug } from 'debug' - -export { Debug } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts deleted file mode 100644 index e7c13be99966..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { bindAdapter } from './binder' -export { ColumnTypeEnum, JsonNullMarker } from './const' -export { Debug } from './debug' -export { ok, err, type Result } from './result' -export type * from './types' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts deleted file mode 100644 index 5af95db68671..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Error } from './types' -export type Result = { - // common methods - map(fn: (value: T) => U): Result - flatMap(fn: (value: T) => Result): Result -} & ( - | { - readonly ok: true - readonly value: T - } - | { - readonly ok: false - readonly error: Error - } -) - -export function ok(value: T): Result { - return { - ok: true, - value, - map(fn) { - return ok(fn(value)) - }, - flatMap(fn) { - return fn(value) - }, - } -} - -export function err(error: Error): Result { - return { - ok: false, - error, - map() { - return err(error) - }, - flatMap() { - return err(error) - }, - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts deleted file 
mode 100644 index 92019f81824b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { ColumnTypeEnum } from './const' -import { Result } from './result' - -export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum] - -export interface ResultSet { - /** - * List of column types appearing in a database query, in the same order as `columnNames`. - * They are used within the Query Engine to convert values from JS to Quaint values. - */ - columnTypes: Array - - /** - * List of column names appearing in a database query, in the same order as `columnTypes`. - */ - columnNames: Array - - /** - * List of rows retrieved from a database query. - * Each row is a list of values, whose length matches `columnNames` and `columnTypes`. - */ - rows: Array> - - /** - * The last ID of an `INSERT` statement, if any. - * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases. - */ - lastInsertId?: string -} - -export type Query = { - sql: string - args: Array -} - -export type Error = - | { - kind: 'GenericJs' - id: number - } - | { - kind: 'Postgres' - code: string - severity: string - message: string - detail: string | undefined - column: string | undefined - hint: string | undefined - } - | { - kind: 'Mysql' - code: number - message: string - state: string - } - | { - kind: 'Sqlite' - /** - * Sqlite extended error code: https://www.sqlite.org/rescode.html - */ - extendedCode: number - message: string - } - -export interface Queryable { - readonly flavour: 'mysql' | 'postgres' | 'sqlite' - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the type-aware result set of the query. - * - * This is the preferred way of executing `SELECT` queries. - */ - queryRaw(params: Query): Promise> - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the number of affected rows. 
- * - * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries, - * as well as transactional queries. - */ - executeRaw(params: Query): Promise> -} - -export interface DriverAdapter extends Queryable { - /** - * Starts new transation. - */ - startTransaction(): Promise> - - /** - * Closes the connection to the database, if any. - */ - close: () => Promise> -} - -export type TransactionOptions = { - usePhantomQuery: boolean -} - -export interface Transaction extends Queryable { - /** - * Transaction options. - */ - readonly options: TransactionOptions - /** - * Commit the transaction. - */ - commit(): Promise> - /** - * Rolls back the transaction. - */ - rollback(): Promise> - /** - * Discards and closes the transaction which may or may not have been committed or rolled back. - * This operation must be synchronous. If the implementation requires calling creating new - * asynchronous tasks on the event loop, the driver is responsible for handling the errors - * appropriately to ensure they don't crash the application. 
- */ - dispose(): Result -} - -export interface ErrorCapturingDriverAdapter extends DriverAdapter { - readonly errorRegistry: ErrorRegistry -} - -export interface ErrorRegistry { - consumeError(id: number): ErrorRecord | undefined -} - -export type ErrorRecord = { error: unknown } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json deleted file mode 100644 index 2c2e266bdb3b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration", - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json deleted file mode 100644 index 2036794f8c02..000000000000 --- a/query-engine/driver-adapters/js/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "private": true, - "name": "js", - "version": "0.0.2", - "description": "", - "engines": { - "node": ">=16.13", - "pnpm": ">=8.6.6 <9" - }, - "license": "Apache-2.0", - "scripts": { - "build": "pnpm -r run build", - "lint": "pnpm -r run lint" - }, - "keywords": [], - "author": "", - "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" - } -} diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml deleted file mode 100644 index f9e70da7ee5a..000000000000 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ /dev/null @@ -1,8 +0,0 @@ -packages: - - 
'./adapter-libsql' - - './adapter-neon' - - './adapter-pg' - - './adapter-planetscale' - - './connector-test-kit-executor' - - './driver-adapter-utils' - - './smoke-test-js' diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example deleted file mode 100644 index 15a286787cbd..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ /dev/null @@ -1,26 +0,0 @@ -# Uncomment "source_up" if you need to load the .envrc at the root of the -# `prisma-engines` repository before loading this one (for example, if you -# are using Nix). -# -# source_up - -export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict" -export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10" - -# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable` -export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438" - -# Set this to a `file:` URL when using a local sqlite database (either -# standalone or as an embedded replica). Otherwise, when using a remote Turso -# (or sqld) database in HTTP mode directly without an embedded replica, set its -# URL here. -export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db" - -# # Set this to the URL of remote Turso database when using an embedded replica. -# export JS_LIBSQL_SYNC_URL="" - -# # Provide an auth token when using a remote Turso database. -# export JS_LIBSQL_AUTH_TOKEN="" - -# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma. 
-export JS_LIBSQL_INT_MODE="bigint" diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore deleted file mode 100644 index be550f99317f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -libsql.db -libsql.db-journal -libsql.db-shm -libsql.db-wal diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md deleted file mode 100644 index f1b81df5d268..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# @prisma/driver-adapters-smoke-tests-js - -This is a playground for testing the `libquery` client with the experimental Node.js drivers. -It contains a subset of `@prisma/client`, plus some handy executable smoke tests: -- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library. -- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`. - -## How to setup - -We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory. -It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon requires either Node.js `v18`+ or a custom `fetch` function. 
- -In the parent directory (`cd ..`): -- Build the driver adapters via `pnpm i && pnpm build` - -In the current directoy: -- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template -- Install Node.js dependencies via - ```bash - pnpm i - ``` - -(or run `sh ./setup.sh`) - -Anywhere in the repository: -- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine - -### PlanetScale - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/) -- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). -- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. - -In the current directory: -- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. -- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database. - For more fine-grained control: - - Run `pnpm planetscale:libquery` to test using `libquery` - - Run `pnpm planetscale:client` to test using `@prisma/client` - -### Neon - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). -- Paste the connection string to `JS_NEON_DATABASE_URL`. - -In the current directory: -- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data. -- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection. 
- For more fine-grained control: - - Run `pnpm neon:ws:libquery` to test using `libquery` - - Run `pnpm neon:ws:client` to test using `@prisma/client` -- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. - For more fine-grained control: - - Run `pnpm neon:http:libquery` to test using `libquery` - - Run `pnpm neon:http:client` to test using `@prisma/client` - -### Pg - -Start database via `docker compose up postgres15` in `/docker`. - -In the current directory: -- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data. -- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg` - For more fine-grained control: - - Run `pnpm pg:libquery` to test using `libquery` - - Run `pnpm pg:client` to test using `@prisma/client` - -### Libsql - -In the current directory: -- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data. 
-- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL` - For more fine-grained control: - - Run `pnpm libsql:libquery` to test using `libquery` - - Run `pnpm libsql:client` to test using `@prisma/client` \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index 31362c1cc873..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "@prisma/driver-adapters-smoke-tests-js", - "private": true, - "type": "module", - "version": "5.4.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", - "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", - "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", - "prisma:neon:ws": "pnpm prisma:neon", - "prisma:neon:http": "pnpm prisma:neon", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": 
"DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", - "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", - "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", - "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", - "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm 
planetscale:libquery && pnpm planetscale:client", - "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", - "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts", - "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts", - "libsql": "pnpm libsql:libquery && pnpm libsql:client" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.2", - "@prisma/driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.26.2" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.4.2", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - 
enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 59efb33a5594..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,125 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? 
@db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? 
-} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index 7cd31f406b9d..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,117 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? 
@db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? - users User[] -} - -model User { - id String @id @default(uuid()) - email String - favoriteProduct Product? @relation(fields: [productId], references: [id]) - productId String? 
-} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql deleted file mode 100644 index 014592d2fa2c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql +++ /dev/null @@ -1,17 +0,0 @@ -INSERT INTO type_test ( - int_column, - bigint_column, - double_column, - decimal_column, - boolean_column, - text_column, - datetime_column -) VALUES ( - 2147483647, -- int - 9223372036854775807, -- bigint - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'This is a long text...', -- text - '2023-07-24 23:59:59.415' -- datetime -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql deleted file mode 100644 index 31c63d423e22..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql +++ /dev/null @@ -1,85 +0,0 @@ --- CreateTable -CREATE TABLE "type_test" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "int_column" INTEGER NOT NULL, - "int_column_null" INTEGER, - "bigint_column" BIGINT NOT NULL, - "bigint_column_null" BIGINT, - "double_column" REAL NOT NULL, - "double_column_null" REAL, - "decimal_column" DECIMAL NOT NULL, - "decimal_column_null" DECIMAL, - "boolean_column" BOOLEAN NOT NULL, - "boolean_column_null" BOOLEAN, - "text_column" TEXT NOT NULL, - "text_column_null" TEXT, - "datetime_column" DATETIME NOT NULL, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_2" ( - "id" TEXT NOT NULL PRIMARY KEY, - "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE 
"type_test_3" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "bytes" BLOB NOT NULL -); - --- CreateTable -CREATE TABLE "Child" ( - "c" TEXT NOT NULL, - "c_1" TEXT NOT NULL, - "c_2" TEXT NOT NULL, - "parentId" TEXT, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "Parent" ( - "p" TEXT NOT NULL, - "p_1" TEXT NOT NULL, - "p_2" TEXT NOT NULL, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "authors" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "firstName" TEXT NOT NULL, - "lastName" TEXT NOT NULL, - "age" INTEGER NOT NULL -); - --- CreateTable -CREATE TABLE "Product" ( - "id" TEXT NOT NULL PRIMARY KEY, - "properties" TEXT NOT NULL, - "properties_null" TEXT -); - --- CreateTable -CREATE TABLE "Unique" ( - "email" TEXT NOT NULL PRIMARY KEY, -); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_parentId_key" ON "Child"("parentId"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml deleted file mode 100644 index e5e5c4705ab0..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml +++ /dev/null @@ -1,3 +0,0 @@ -# Please do not edit this file manually -# It should be added in your version-control system (i.e. 
Git) -provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma deleted file mode 100644 index bde23dee66ac..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,79 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - double_column Float - double_column_null Float? - decimal_column Decimal - decimal_column_null Decimal? - boolean_column Boolean - boolean_column_null Boolean? - text_column String - text_column_null String? - datetime_column DateTime - datetime_column_null DateTime? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) - datetime_column_null DateTime? -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties String - properties_null String? 
-} - -model Unique { - email String @id -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh deleted file mode 100644 index 7654679db14e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/setup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -cd .. || return -pnpm i && pnpm build -cargo build -p query-engine-node-api -cd smoke-test-js || exit -pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index b23cf2d97fb8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { describe, it } from 'node:test' -import path from 'node:path' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@prisma/driver-adapter-utils' -import { getLibQueryEnginePath } from '../libquery/util' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - const dirname = path.dirname(new URL(import.meta.url).pathname) - process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - - // Run twice, once with adapter and once fully without - for (const adapter of [driverAdapter, null]) { - const isUsingDriverAdapters = adapter !== null - describe(isUsingDriverAdapters ? 
`using Driver Adapters` : `using Rust drivers`, () => { - - it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { - const prisma = new PrismaClient({ adapter, log }) - - await assert.rejects( - async () => { - const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - }, - (err) => { - assert.match(err.message, /unique/i); - return true; - }, - ); - - }) - - it('batch queries', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - // TODO: sqlite should be here too but it's too flaky the way the test is currently written, - // only a subset of logs arrives on time (from 2 to 4 out of 5) - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. 
- assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) - assert.deepEqual( - queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries, - ) - } - }) - - if (provider !== 'sqlite') { - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) - } else { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) - } - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - } else { - describe('isolation levels with sqlite', () => { - it('accepts Serializable as a no-op', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'Serializable', - }) - - console.log("queries", queries) - - if (isUsingDriverAdapters) { - assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') - assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') - } else { - assert.equal(queries.at(0), 'BEGIN') - assert.equal(queries.at(-1), 'COMMIT') - } - - assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) - }) - - it('throws on unsupported isolation levels', async () => { - const prisma = new PrismaClient({ adapter }) - - 
assert.rejects( - prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }), - ) - }) - - }) - - } - - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) - - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) - - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts deleted file mode 100644 index f216b2a02ac7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('libsql with @prisma/client', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - - if (syncUrl) { - await client.sync() - } - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index 53156ac56249..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', 
async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const connection = neon(connectionString) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index 37b0a9088bb7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index 99048ad3d95f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? 
'' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 3c22b7aa3062..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts deleted file mode 100644 index bd491db289a3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts +++ /dev/null @@ -1,78 +0,0 @@ -import * as Transaction from './Transaction' - -export type JsonQuery = { - modelName?: string - action: JsonQueryAction - query: JsonFieldSelection -} - -export type JsonBatchQuery = { - batch: JsonQuery[] - transaction?: { isolationLevel?: Transaction.IsolationLevel } -} - -export type JsonQueryAction = - | 'findUnique' - | 'findUniqueOrThrow' - | 'findFirst' - | 'findFirstOrThrow' - | 'findMany' - | 'createOne' - | 'createMany' - | 'updateOne' - | 'updateMany' - | 'deleteOne' - | 'deleteMany' - | 'upsertOne' - | 'aggregate' - | 'groupBy' - | 'executeRaw' - | 'queryRaw' - | 'runCommandRaw' - | 'findRaw' - | 'aggregateRaw' - -export type JsonFieldSelection = { - arguments?: Record - selection: 
JsonSelectionSet -} - -export type JsonSelectionSet = { - $scalars?: boolean - $composites?: boolean -} & { - [fieldName: string]: boolean | JsonFieldSelection -} - -export type JsonArgumentValue = - | number - | string - | boolean - | null - | JsonTaggedValue - | JsonArgumentValue[] - | { [key: string]: JsonArgumentValue } - -export type DateTaggedValue = { $type: 'DateTime'; value: string } -export type DecimalTaggedValue = { $type: 'Decimal'; value: string } -export type BytesTaggedValue = { $type: 'Bytes'; value: string } -export type BigIntTaggedValue = { $type: 'BigInt'; value: string } -export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } } -export type EnumTaggedValue = { $type: 'Enum'; value: string } -export type JsonTaggedValue = { $type: 'Json'; value: string } - -export type JsonInputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | FieldRefTaggedValue - | JsonTaggedValue - | EnumTaggedValue - -export type JsonOutputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | JsonTaggedValue diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts deleted file mode 100644 index a25b3dd26728..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineConfig } from './QueryEngine' - -export type QueryEngineInstance = { - connect(headers: string): Promise - disconnect(headers: string): Promise - /** - * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest` - * @param headersStr JSON.stringified `QueryEngineRequestHeaders` - */ - query(requestStr: string, headersStr: string, transactionId?: string): Promise - sdlSchema(): Promise - 
dmmf(traceparent: string): Promise - startTransaction(options: string, traceHeaders: string): Promise - commitTransaction(id: string, traceHeaders: string): Promise - rollbackTransaction(id: string, traceHeaders: string): Promise - metrics(options: string): Promise -} - -export interface QueryEngineConstructor { - new( - config: QueryEngineConfig, - logger: (log: string) => void, - driverAdapter?: ErrorCapturingDriverAdapter, - ): QueryEngineInstance -} - -export interface LibraryLoader { - loadLibrary(): Promise -} - -// Main -export type Library = { - QueryEngine: QueryEngineConstructor - - version: () => { - // The commit hash of the engine - commit: string - // Currently 0.1.0 (Set in Cargo.toml) - version: string - } - /** - * This returns a string representation of `DMMF.Document` - */ - dmmf: (datamodel: string) => Promise -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts deleted file mode 100644 index 5bab74493dee..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { JsonBatchQuery, JsonQuery } from './JsonProtocol' -import * as Transaction from './Transaction' - -// Events -export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent - -export type QueryEngineLogEvent = { - level: string - module_path: string - message: string - span?: boolean -} - -export type QueryEngineQueryEvent = { - level: 'info' - module_path: string - query: string - item_type: 'query' - params: string - duration_ms: string - result: string -} - -export type QueryEnginePanicEvent = { - level: 'error' - module_path: string - message: 'PANIC' - reason: string - file: string - line: string - column: string -} - -// Configuration -export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off' - -export type 
QueryEngineTelemetry = { - enabled: Boolean - endpoint: string -} - -export type GraphQLQuery = { - query: string - variables: object -} - -export type EngineProtocol = 'graphql' | 'json' -export type EngineQuery = GraphQLQuery | JsonQuery - -export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] - -export type QueryEngineConfig = { - // TODO rename datamodel here and other places - datamodel: string - configDir: string - logQueries: boolean - ignoreEnvVarErrors: boolean - datasourceOverrides?: Record - env: Record - logLevel: QueryEngineLogLevel - telemetry?: QueryEngineTelemetry - engineProtocol: EngineProtocol -} - -// Errors -export type SyncRustError = { - is_panic: boolean - message: string - meta: { - full_error: string - } - error_code: string -} - -export type RustRequestError = { - is_panic: boolean - message: string - backtrace: string -} - -export type QueryEngineResult = { - data: T - elapsed: number -} - -export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery - -export type QueryEngineBatchGraphQLRequest = { - batch: QueryEngineRequest[] - transaction?: boolean - isolationLevel?: Transaction.IsolationLevel -} - -export type QueryEngineRequest = { - query: string - variables: Object -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts deleted file mode 100644 index 1c5786cc66da..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts +++ /dev/null @@ -1,35 +0,0 @@ -export enum IsolationLevel { - ReadUncommitted = 'ReadUncommitted', - ReadCommitted = 'ReadCommitted', - RepeatableRead = 'RepeatableRead', - Snapshot = 'Snapshot', - Serializable = 'Serializable', -} - -/** - * maxWait ?= 2000 - * timeout ?= 5000 - */ -export type Options = { - maxWait?: number - timeout?: number - isolationLevel?: IsolationLevel -} - -export type InteractiveTransactionInfo 
= { - /** - * Transaction ID returned by the query engine. - */ - id: string - - /** - * Arbitrary payload the meaning of which depends on the `Engine` implementation. - * For example, `DataProxyEngine` needs to associate different API endpoints with transactions. - * In `LibraryEngine` and `BinaryEngine` it is currently not used. - */ - payload: Payload -} - -export type TransactionHeaders = { - traceparent?: string -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts deleted file mode 100644 index 13ac5cd9ec81..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { bindAdapter } from '@prisma/driver-adapter-utils' -import test, { after, before, describe } from 'node:test' -import { createQueryFn, initQueryEngine, throwAdapterError } from './util' -import assert from 'node:assert' - -const fakeAdapter = bindAdapter({ - flavour: 'postgres', - startTransaction() { - throw new Error('Error in startTransaction') - }, - - queryRaw() { - throw new Error('Error in queryRaw') - }, - - executeRaw() { - throw new Error('Error in executeRaw') - }, - close() { - return Promise.resolve({ ok: true, value: undefined }) - }, -}) - -const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') -const doQuery = createQueryFn(engine, fakeAdapter) - -const startTransaction = async () => { - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) - if (res['error_code']) { - throwAdapterError(res, fakeAdapter) - } -} - -describe('errors propagation', () => { - before(async () => { - await engine.connect('{}') - }) - after(async () => { - await engine.disconnect('{}') - }) - - test('works for queries', async () => { - await assert.rejects( - doQuery({ - modelName: 
'Product', - action: 'findMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }), - /Error in queryRaw/, - ) - }) - - test('works for executeRaw', async () => { - await assert.rejects( - doQuery({ - action: 'executeRaw', - query: { - arguments: { - query: 'SELECT 1', - parameters: '[]', - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in executeRaw/, - ) - }) - - test('works with implicit transaction', async () => { - await assert.rejects( - doQuery({ - modelName: 'User', - action: 'createOne', - query: { - arguments: { - data: { - email: 'user@example.com', - favoriteProduct: { - create: { - properties: {}, - }, - }, - }, - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in startTransaction/, - ) - }) - - test('works with explicit transaction', async () => { - await assert.rejects(startTransaction(), /Error in startTransaction/) - }) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts deleted file mode 100644 index c50ad3e257ab..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ /dev/null @@ -1,722 +0,0 @@ -import { describe, it, before, after } from 'node:test' -import assert from 'node:assert' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineInstance } from '../engines/types/Library' -import { createQueryFn, initQueryEngine } from './util' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function smokeTestLibquery( - adapter: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, - supportsTransactions = true, -) { - const engine = initQueryEngine(adapter, prismaSchemaRelativePath) - const flavour = adapter.flavour - - const doQuery = createQueryFn(engine, adapter) - - describe('using libquery with Driver Adapters', () => { - before(async () => { - await 
engine.connect('trace') - }) - - after(async () => { - await engine.disconnect('trace') - await adapter.close() - }) - - it('create JSON values', async () => { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, - }) - - const created = await doQuery({ - action: 'createOne', - modelName: 'Product', - query: { - arguments: { - data: { - properties: json, - properties_null: null, - }, - }, - selection: { - properties: true, - }, - }, - }) - - if (flavour !== 'sqlite') { - assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') - } - - console.log('[nodejs] created', JSON.stringify(created, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'Product', - query: { - selection: { - id: true, - properties: true, - properties_null: true, - }, - }, - }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'Product', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create with autoincrement', async () => { - await doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - }) - - it('create non scalar types', async () => { - const create = await doQuery({ - action: 'createOne', - modelName: 'type_test_2', - query: { - arguments: { - data: {}, - }, - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - }, - }) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) - - const resultSet = await 
doQuery({ - action: 'findMany', - modelName: 'type_test_2', - query: { - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - arguments: { - where: {}, - }, - }, - }) - - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'type_test_2', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create/delete parent and child', async () => { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? 
OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create explicit transaction', async () => { - if (!supportsTransactions) return - - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') - const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) - assert.notStrictEqual(tx_id, undefined) - - await doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - }) - - it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - await doQuery({ - modelName: 'Unique', - action: 'deleteMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }) - - await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const promise = doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - }) - - describe('read scalar and non scalar types', () => { - if (['mysql'].includes(flavour)) { - it('mysql', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, 
- int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['postgres'].includes(flavour)) { - it('postgres', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['sqlite'].includes(flavour)) { - it('sqlite', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - int_column: true, - bigint_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - text_column: true, - datetime_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else { - throw new Error(`Missing test for flavour ${flavour}`) - } - }) - - it('write and read back bytes', async () => { - const createResultSet = await doQuery({ - action: 'createOne', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - arguments: { - data: { - bytes: { - $type: 'Bytes', - value: 'AQID', - }, - }, - }, - }, - }) - console.log('[nodejs] 
createOne resultSet:') - console.dir(createResultSet, { depth: Infinity }) - - const findResultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet:') - console.dir(findResultSet, { depth: Infinity }) - }) - }) -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, 
- datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await this.doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction( - JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), - 'trace', - ) - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts deleted file mode 100644 index 7f0a1038ec74..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('libsql', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - if (syncUrl) { - await client.sync() - } - - smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts deleted file mode 100644 index 02872b885fe3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('neon (HTTP)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - - const neonConnection = neon(connectionString) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts deleted file mode 100644 index 54765f5961ba..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon (WebSocket)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts deleted file mode 100644 index 9b79e7284be8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('pg', () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? 
'' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts deleted file mode 100644 index bb7c81805adc..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('planetscale', () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 783eb76759d2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,71 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function initQueryEngine( - driver: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, -): QueryEngineInstance { - const dirname = path.dirname(new URL(import.meta.url).pathname) - const 
libQueryEnginePath = getLibQueryEnginePath(dirname) - - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions, logCallback, driver) - - return engine -} - -export function getLibQueryEnginePath(dirname: String) { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) -} - -export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { - return async function doQuery(query: JsonQuery, tx_id?: string) { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) - } - return parsedResult - } -} - -export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh deleted file mode 100755 index 8f592c0e197c..000000000000 --- a/query-engine/driver-adapters/js/version.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Usage: `./version.sh x.y.z` will set the `x.y.z` to every package in the monorepo. - -target_version=$1 -package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) - -# Iterate through each package directory -for package_dir in $package_dirs; do - # Check if the directory exists - if [ -d "$package_dir" ]; then - # Set the target version using pnpm - (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) - fi -done diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index c43f66a81e72..53133e037b6f 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -31,7 +31,6 @@ pub struct SqliteErrorDef { #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception GenericJs { @@ -64,7 +63,6 @@ impl From for QuaintError { } /// Wrapper for JS-side result type -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum JsResult where T: FromNapiValue,