From 9bc500fa25bb22ac16a9e8efb45d4f89ba716209 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 13:36:09 +0200 Subject: [PATCH 01/26] Promote connector-test-kit to the driver-adapters directory and remove js --- query-engine/connector-test-kit-rs/README.md | 7 +- .../query-tests-setup/src/config.rs | 2 +- .../dist/index.d.mts | 2 + .../dist/index.d.ts | 2 + .../connector-test-kit-executor/dist/index.js | 250 +++ .../dist/index.mjs | 225 +++ .../node_modules/@libsql/client | 1 + .../node_modules/@neondatabase/serverless | 1 + .../node_modules/@planetscale/database | 1 + .../node_modules/@prisma/adapter-libsql | 1 + .../node_modules/@prisma/adapter-neon | 1 + .../node_modules/@prisma/adapter-pg | 1 + .../node_modules/@prisma/adapter-planetscale | 1 + .../node_modules/@prisma/driver-adapter-utils | 1 + .../node_modules/@types/pg | 1 + .../node_modules/pg | 1 + .../node_modules/undici | 1 + .../connector-test-kit-executor/package.json | 0 .../script/start_node.sh | 0 .../src/engines/JsonProtocol.ts | 0 .../src/engines/Library.ts | 0 .../src/engines/QueryEngine.ts | 0 .../src/engines/Transaction.ts | 0 .../connector-test-kit-executor/src/index.ts | 0 .../src/jsonRpc.ts | 0 .../connector-test-kit-executor/src/qe.ts | 0 query-engine/driver-adapters/js/.gitignore | 44 - query-engine/driver-adapters/js/.npmrc | 2 - query-engine/driver-adapters/js/.nvmrc | 1 - .../driver-adapters/js/.prettierrc.yml | 5 - query-engine/driver-adapters/js/README.md | 42 - .../js/adapter-libsql/.gitignore | 1 - .../js/adapter-libsql/README.md | 95 - .../js/adapter-libsql/package.json | 31 - .../js/adapter-libsql/src/conversion.ts | 161 -- .../js/adapter-libsql/src/index.ts | 1 - .../js/adapter-libsql/src/libsql.ts | 171 -- .../js/adapter-libsql/tests/types.test.mts | 151 -- .../js/adapter-libsql/tsconfig.build.json | 6 - .../js/adapter-libsql/tsconfig.json | 3 - .../driver-adapters/js/adapter-neon/README.md | 71 - .../js/adapter-neon/package.json | 30 - .../js/adapter-neon/src/conversion.ts | 286 --- .../js/adapter-neon/src/index.ts | 1 - .../js/adapter-neon/src/neon.ts | 165 -- .../js/adapter-neon/tsconfig.build.json | 6 - .../js/adapter-neon/tsconfig.json | 3 - .../driver-adapters/js/adapter-pg/README.md | 3 - .../js/adapter-pg/package.json | 31 - .../js/adapter-pg/src/conversion.ts | 286 --- .../js/adapter-pg/src/index.ts | 1 - .../driver-adapters/js/adapter-pg/src/pg.ts | 138 -- .../js/adapter-planetscale/README.md | 71 - .../js/adapter-planetscale/package.json | 29 - .../js/adapter-planetscale/src/conversion.ts | 98 -- .../js/adapter-planetscale/src/deferred.ts | 13 - .../js/adapter-planetscale/src/index.ts | 1 - .../js/adapter-planetscale/src/planetscale.ts | 181 -- .../adapter-planetscale/tsconfig.build.json | 6 - .../js/adapter-planetscale/tsconfig.json | 3 - .../js/driver-adapter-utils/README.md | 3 - .../js/driver-adapter-utils/package.json | 26 - .../js/driver-adapter-utils/src/binder.ts | 80 - .../js/driver-adapter-utils/src/const.ts | 48 - .../js/driver-adapter-utils/src/debug.ts | 3 - .../js/driver-adapter-utils/src/index.ts | 5 - .../js/driver-adapter-utils/src/result.ts | 41 - .../js/driver-adapter-utils/src/types.ts | 132 -- .../driver-adapter-utils/tsconfig.build.json | 6 - .../js/driver-adapter-utils/tsconfig.json | 3 - query-engine/driver-adapters/js/package.json | 23 - .../driver-adapters/js/pnpm-lock.yaml | 1554 ----------------- .../driver-adapters/js/pnpm-workspace.yaml | 8 - .../js/smoke-test-js/.envrc.example | 26 - .../js/smoke-test-js/.gitignore | 4 - 
.../js/smoke-test-js/README.md | 79 - .../js/smoke-test-js/package.json | 67 - .../mysql/commands/type_test/insert.sql | 51 - .../smoke-test-js/prisma/mysql/schema.prisma | 125 -- .../postgres/commands/type_test/insert.sql | 35 - .../prisma/postgres/schema.prisma | 117 -- .../sqlite/commands/type_test/insert.sql | 17 - .../20230915202554_init/migration.sql | 85 - .../sqlite/migrations/migration_lock.toml | 3 - .../smoke-test-js/prisma/sqlite/schema.prisma | 79 - .../driver-adapters/js/smoke-test-js/setup.sh | 7 - .../js/smoke-test-js/src/client/client.ts | 164 -- .../smoke-test-js/src/client/libsql.test.ts | 20 - .../src/client/neon.http.test.ts | 13 - .../smoke-test-js/src/client/neon.ws.test.ts | 16 - .../js/smoke-test-js/src/client/pg.test.ts | 13 - .../src/client/planetscale.test.ts | 13 - .../src/engines/types/JsonProtocol.ts | 78 - .../src/engines/types/Library.ts | 46 - .../src/engines/types/QueryEngine.ts | 97 - .../src/engines/types/Transaction.ts | 35 - .../smoke-test-js/src/libquery/errors.test.ts | 105 -- .../js/smoke-test-js/src/libquery/libquery.ts | 722 -------- .../smoke-test-js/src/libquery/libsql.test.ts | 22 - .../src/libquery/neon.http.test.ts | 16 - .../src/libquery/neon.ws.test.ts | 18 - .../js/smoke-test-js/src/libquery/pg.test.ts | 15 - .../src/libquery/planetscale.test.ts | 15 - .../js/smoke-test-js/src/libquery/util.ts | 71 - .../js/smoke-test-js/tsconfig.json | 3 - query-engine/driver-adapters/js/tsconfig.json | 23 - query-engine/driver-adapters/js/version.sh | 15 - query-engine/driver-adapters/src/result.rs | 2 - 108 files changed, 495 insertions(+), 6290 deletions(-) create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.js create mode 100755 query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg create mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/package.json (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/script/start_node.sh (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/JsonProtocol.ts (100%) rename query-engine/driver-adapters/{js => 
}/connector-test-kit-executor/src/engines/Library.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/QueryEngine.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/engines/Transaction.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/index.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/jsonRpc.ts (100%) rename query-engine/driver-adapters/{js => }/connector-test-kit-executor/src/qe.ts (100%) delete mode 100644 query-engine/driver-adapters/js/.gitignore delete mode 100644 query-engine/driver-adapters/js/.npmrc delete mode 100644 query-engine/driver-adapters/js/.nvmrc delete mode 100644 query-engine/driver-adapters/js/.prettierrc.yml delete mode 100644 query-engine/driver-adapters/js/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/.gitignore delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-libsql/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-neon/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/src/neon.ts delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-neon/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-pg/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-pg/src/pg.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/README.md delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/package.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/index.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/README.md delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/package.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts delete mode 
100644 query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json delete mode 100644 query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/package.json delete mode 100644 query-engine/driver-adapters/js/pnpm-lock.yaml delete mode 100644 query-engine/driver-adapters/js/pnpm-workspace.yaml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.envrc.example delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/.gitignore delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/README.md delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/package.json delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/setup.sh delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts delete mode 100644 
query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts delete mode 100644 query-engine/driver-adapters/js/smoke-test-js/tsconfig.json delete mode 100644 query-engine/driver-adapters/js/tsconfig.json delete mode 100755 query-engine/driver-adapters/js/version.sh diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 2c849a2aa985..33d6fecb80ee 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -66,8 +66,9 @@ If you choose to set up the databases yourself, please note that the connection #### Running tests through driver adapters -The query engine is able to delegate query execution to javascript through [driver adapters](query-engine/driver-adapters/js/README.md). -This means that instead of drivers being implemented in Rust, it's a layer of adapters over NodeJs drivers the code that actually communicates with the databases. +The query engine is able to delegate query execution to JavaScript through driver adapters. +This means that instead of drivers being implemented in Rust, it's a layer of adapters over Node.js +drivers, the code that actually communicates with the databases. See [`adapter-*` packages in prisma/prisma](https://github.com/prisma/prisma/tree/main/packages) To run tests through a driver adapters, you should also configure the following environment variables: @@ -78,7 +79,7 @@ To run tests through a driver adapters, you should also configure the following Example: ```shell -export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" +export EXTERNAL_TEST_EXECUTOR="$WORKSPACE_ROOT/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh" export DRIVER_ADAPTER=neon export DRIVER_ADAPTER_CONFIG ='{ "proxyUrl": "127.0.0.1:5488/v1" }' ```` diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index b27f27406e5c..4af4e763298a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -159,7 +159,7 @@ impl TestConfig { /// and the workspace_root is set, then use the default external test executor.
fn fill_defaults(&mut self) { const DEFAULT_TEST_EXECUTOR: &str = - "query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh"; + "query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh"; if self .external_test_executor diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts new file mode 100644 index 000000000000..c9247d453553 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts @@ -0,0 +1,2 @@ + +export { } diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts new file mode 100644 index 000000000000..c9247d453553 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts @@ -0,0 +1,2 @@ + +export { } diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js new file mode 100644 index 000000000000..14278e014d51 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js @@ -0,0 +1,250 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// src/qe.ts +var os = __toESM(require("os")); +var path = __toESM(require("path")); +var import_meta = {}; +function initQueryEngine(adapter, datamodel, queryLogCallback, debug2) { + const libExt = os.platform() === "darwin" ? "dylib" : "so"; + const dirname2 = path.dirname(new URL(import_meta.url).pathname); + const libQueryEnginePath = path.join(dirname2, `../../../../../target/debug/libquery_engine.${libExt}`); + const libqueryEngine = { exports: {} }; + process.dlopen(libqueryEngine, libQueryEnginePath); + const QueryEngine = libqueryEngine.exports.QueryEngine; + const queryEngineOptions = { + datamodel, + configDir: ".", + engineProtocol: "json", + logLevel: process.env["RUST_LOG"] ?? 
"info", + logQueries: true, + env: process.env, + ignoreEnvVarErrors: false + }; + const logCallback = (event) => { + const parsed = JSON.parse(event); + if (parsed.is_query) { + queryLogCallback(parsed.query); + } + debug2(parsed); + }; + return new QueryEngine(queryEngineOptions, logCallback, adapter); +} + +// src/index.ts +var readline = __toESM(require("readline")); +var import_pg = __toESM(require("pg")); +var prismaPg = __toESM(require("@prisma/adapter-pg")); +var import_serverless = require("@neondatabase/serverless"); +var import_undici = require("undici"); +var prismaNeon = __toESM(require("@prisma/adapter-neon")); +var import_client = require("@libsql/client"); +var import_adapter_libsql = require("@prisma/adapter-libsql"); +var import_database = require("@planetscale/database"); +var import_adapter_planetscale = require("@prisma/adapter-planetscale"); +var import_driver_adapter_utils = require("@prisma/driver-adapter-utils"); +var SUPPORTED_ADAPTERS = { + "pg": pgAdapter, + "neon:ws": neonWsAdapter, + "libsql": libsqlAdapter, + "planetscale": planetscaleAdapter +}; +var debug = (() => { + if ((process.env.LOG_LEVEL ?? "").toLowerCase() != "debug") { + return (...args) => { + }; + } + return (...args) => { + console.error("[nodejs] DEBUG:", ...args); + }; +})(); +var err = (...args) => console.error("[nodejs] ERROR:", ...args); +async function main() { + const iface = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + iface.on("line", async (line) => { + try { + const request = JSON.parse(line); + debug(`Got a request: ${line}`); + try { + const response = await handleRequest(request.method, request.params); + respondOk(request.id, response); + } catch (err2) { + debug("[nodejs] Error from request handler: ", err2); + respondErr(request.id, { + code: 1, + message: err2.toString() + }); + } + } catch (err2) { + debug("Received non-json line: ", line); + } + }); +} +var state = {}; +async function handleRequest(method, params) { + switch (method) { + case "initializeSchema": { + const castParams = params; + const logs = []; + const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { + logs.push(log); + }); + await engine.connect(""); + state[castParams.schemaId] = { + engine, + adapter, + logs + }; + return null; + } + case "query": { + debug("Got `query`", params); + const castParams = params; + const engine = state[castParams.schemaId].engine; + const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId); + const parsedResult = JSON.parse(result); + if (parsedResult.errors) { + const error = parsedResult.errors[0]?.user_facing_error; + if (error.error_code === "P2036") { + const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id); + if (!jsError) { + err(`Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`); + } else { + err("got error response from the engine caused by the driver: ", jsError); + } + } + } + debug("got response from engine: ", result); + return result; + } + case "startTx": { + debug("Got `startTx", params); + const { schemaId, options } = params; + const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), ""); + return JSON.parse(result); + } + case "commitTx": { + debug("Got `commitTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.commitTransaction(txId, "{}"); + return JSON.parse(result); + } + case "rollbackTx": { + debug("Got `rollbackTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.rollbackTransaction(txId, "{}"); + return JSON.parse(result); + } + case "teardown": { + debug("Got `teardown", params); + const castParams = params; + await state[castParams.schemaId].engine.disconnect(""); + delete state[castParams.schemaId]; + return {}; + } + case "getLogs": { + const castParams = params; + return state[castParams.schemaId].logs; + } + default: { + throw new Error(`Unknown method: \`${method}\``); + } + } +} +function respondErr(requestId, error) { + const msg = { + jsonrpc: "2.0", + id: requestId, + error + }; + console.log(JSON.stringify(msg)); +} +function respondOk(requestId, payload) { + const msg = { + jsonrpc: "2.0", + id: requestId, + result: payload + }; + console.log(JSON.stringify(msg)); +} +async function initQe(url, prismaSchema, logCallback) { + const adapter = await adapterFromEnv(url); + const errorCapturingAdapter = (0, import_driver_adapter_utils.bindAdapter)(adapter); + const engineInstance = initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug); + return [engineInstance, errorCapturingAdapter]; +} +async function adapterFromEnv(url) { + const adapter = process.env.DRIVER_ADAPTER ?? ""; + if (adapter == "") { + throw new Error("DRIVER_ADAPTER is not defined or empty."); + } + if (!(adapter in SUPPORTED_ADAPTERS)) { + throw new Error(`Unsupported driver adapter: ${adapter}`); + } + return await SUPPORTED_ADAPTERS[adapter](url); +} +function postgres_options(url) { + let args = { connectionString: url }; + const schemaName = new URL(url).searchParams.get("schema"); + if (schemaName != null) { + args.options = `--search_path="${schemaName}"`; + } + return args; +} +async function pgAdapter(url) { + const pool = new import_pg.default.Pool(postgres_options(url)); + return new prismaPg.PrismaPg(pool); +} +async function neonWsAdapter(url) { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; + if (proxyURL == "") { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); + } + import_serverless.neonConfig.wsProxy = () => proxyURL; + import_serverless.neonConfig.webSocketConstructor = import_undici.WebSocket; + import_serverless.neonConfig.useSecureWebSocket = false; + import_serverless.neonConfig.pipelineConnect = false; + const pool = new import_serverless.Pool(postgres_options(url)); + return new prismaNeon.PrismaNeon(pool); +} +async function libsqlAdapter(url) { + const libsql = (0, import_client.createClient)({ url, intMode: "bigint" }); + return new import_adapter_libsql.PrismaLibSQL(libsql); +} +async function planetscaleAdapter(url) { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? 
""; + if (proxyURL == "") { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); + } + const connection = (0, import_database.connect)({ + url: proxyURL, + fetch: import_undici.fetch + }); + return new import_adapter_planetscale.PrismaPlanetScale(connection); +} +main().catch(err); diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs new file mode 100755 index 000000000000..72a403d687a2 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs @@ -0,0 +1,225 @@ +// src/qe.ts +import * as os from "os"; +import * as path from "path"; +function initQueryEngine(adapter, datamodel, queryLogCallback, debug2) { + const libExt = os.platform() === "darwin" ? "dylib" : "so"; + const dirname2 = path.dirname(new URL(import.meta.url).pathname); + const libQueryEnginePath = path.join(dirname2, `../../../../../target/debug/libquery_engine.${libExt}`); + const libqueryEngine = { exports: {} }; + process.dlopen(libqueryEngine, libQueryEnginePath); + const QueryEngine = libqueryEngine.exports.QueryEngine; + const queryEngineOptions = { + datamodel, + configDir: ".", + engineProtocol: "json", + logLevel: process.env["RUST_LOG"] ?? "info", + logQueries: true, + env: process.env, + ignoreEnvVarErrors: false + }; + const logCallback = (event) => { + const parsed = JSON.parse(event); + if (parsed.is_query) { + queryLogCallback(parsed.query); + } + debug2(parsed); + }; + return new QueryEngine(queryEngineOptions, logCallback, adapter); +} + +// src/index.ts +import * as readline from "readline"; +import pgDriver from "pg"; +import * as prismaPg from "@prisma/adapter-pg"; +import { Pool as NeonPool, neonConfig } from "@neondatabase/serverless"; +import { fetch, WebSocket } from "undici"; +import * as prismaNeon from "@prisma/adapter-neon"; +import { createClient } from "@libsql/client"; +import { PrismaLibSQL } from "@prisma/adapter-libsql"; +import { connect as planetscaleConnect } from "@planetscale/database"; +import { PrismaPlanetScale } from "@prisma/adapter-planetscale"; +import { bindAdapter } from "@prisma/driver-adapter-utils"; +var SUPPORTED_ADAPTERS = { + "pg": pgAdapter, + "neon:ws": neonWsAdapter, + "libsql": libsqlAdapter, + "planetscale": planetscaleAdapter +}; +var debug = (() => { + if ((process.env.LOG_LEVEL ?? 
"").toLowerCase() != "debug") { + return (...args) => { + }; + } + return (...args) => { + console.error("[nodejs] DEBUG:", ...args); + }; +})(); +var err = (...args) => console.error("[nodejs] ERROR:", ...args); +async function main() { + const iface = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + iface.on("line", async (line) => { + try { + const request = JSON.parse(line); + debug(`Got a request: ${line}`); + try { + const response = await handleRequest(request.method, request.params); + respondOk(request.id, response); + } catch (err2) { + debug("[nodejs] Error from request handler: ", err2); + respondErr(request.id, { + code: 1, + message: err2.toString() + }); + } + } catch (err2) { + debug("Received non-json line: ", line); + } + }); +} +var state = {}; +async function handleRequest(method, params) { + switch (method) { + case "initializeSchema": { + const castParams = params; + const logs = []; + const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { + logs.push(log); + }); + await engine.connect(""); + state[castParams.schemaId] = { + engine, + adapter, + logs + }; + return null; + } + case "query": { + debug("Got `query`", params); + const castParams = params; + const engine = state[castParams.schemaId].engine; + const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId); + const parsedResult = JSON.parse(result); + if (parsedResult.errors) { + const error = parsedResult.errors[0]?.user_facing_error; + if (error.error_code === "P2036") { + const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id); + if (!jsError) { + err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`); + } else { + err("got error response from the engine caused by the driver: ", jsError); + } + } + } + debug("got response from engine: ", result); + return result; + } + case "startTx": { + debug("Got `startTx", params); + const { schemaId, options } = params; + const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), ""); + return JSON.parse(result); + } + case "commitTx": { + debug("Got `commitTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.commitTransaction(txId, "{}"); + return JSON.parse(result); + } + case "rollbackTx": { + debug("Got `rollbackTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.rollbackTransaction(txId, "{}"); + return JSON.parse(result); + } + case "teardown": { + debug("Got `teardown", params); + const castParams = params; + await state[castParams.schemaId].engine.disconnect(""); + delete state[castParams.schemaId]; + return {}; + } + case "getLogs": { + const castParams = params; + return state[castParams.schemaId].logs; + } + default: { + throw new Error(`Unknown method: \`${method}\``); + } + } +} +function respondErr(requestId, error) { + const msg = { + jsonrpc: "2.0", + id: requestId, + error + }; + console.log(JSON.stringify(msg)); +} +function respondOk(requestId, payload) { + const msg = { + jsonrpc: "2.0", + id: requestId, + result: payload + }; + console.log(JSON.stringify(msg)); +} +async function initQe(url, prismaSchema, logCallback) { + const adapter = await adapterFromEnv(url); + const errorCapturingAdapter = bindAdapter(adapter); + const engineInstance = initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug); + return 
[engineInstance, errorCapturingAdapter]; +} +async function adapterFromEnv(url) { + const adapter = process.env.DRIVER_ADAPTER ?? ""; + if (adapter == "") { + throw new Error("DRIVER_ADAPTER is not defined or empty."); + } + if (!(adapter in SUPPORTED_ADAPTERS)) { + throw new Error(`Unsupported driver adapter: ${adapter}`); + } + return await SUPPORTED_ADAPTERS[adapter](url); +} +function postgres_options(url) { + let args = { connectionString: url }; + const schemaName = new URL(url).searchParams.get("schema"); + if (schemaName != null) { + args.options = `--search_path="${schemaName}"`; + } + return args; +} +async function pgAdapter(url) { + const pool = new pgDriver.Pool(postgres_options(url)); + return new prismaPg.PrismaPg(pool); +} +async function neonWsAdapter(url) { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; + if (proxyURL == "") { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); + } + neonConfig.wsProxy = () => proxyURL; + neonConfig.webSocketConstructor = WebSocket; + neonConfig.useSecureWebSocket = false; + neonConfig.pipelineConnect = false; + const pool = new NeonPool(postgres_options(url)); + return new prismaNeon.PrismaNeon(pool); +} +async function libsqlAdapter(url) { + const libsql = createClient({ url, intMode: "bigint" }); + return new PrismaLibSQL(libsql); +} +async function planetscaleAdapter(url) { + const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; + if (proxyURL == "") { + throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); + } + const connection = planetscaleConnect({ + url: proxyURL, + fetch + }); + return new PrismaPlanetScale(connection); +} +main().catch(err); diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client new file mode 120000 index 000000000000..e005c95cbe57 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client @@ -0,0 +1 @@ +../../../node_modules/.pnpm/@libsql+client@0.3.5/node_modules/@libsql/client \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless new file mode 120000 index 000000000000..1b8b5360d9d7 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless @@ -0,0 +1 @@ +../../../node_modules/.pnpm/@neondatabase+serverless@0.6.0/node_modules/@neondatabase/serverless \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database new file mode 120000 index 000000000000..b7e0d5f2efda --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database @@ -0,0 +1 @@ +../../../node_modules/.pnpm/@planetscale+database@1.11.0/node_modules/@planetscale/database \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql new file mode 120000 index 000000000000..2f6708d276ca --- 
/dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql @@ -0,0 +1 @@ +../../../adapter-libsql \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon new file mode 120000 index 000000000000..f2ca2a7c3fcf --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon @@ -0,0 +1 @@ +../../../adapter-neon \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg new file mode 120000 index 000000000000..d152ffc620d4 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg @@ -0,0 +1 @@ +../../../adapter-pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale new file mode 120000 index 000000000000..936e67a0c767 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale @@ -0,0 +1 @@ +../../../adapter-planetscale \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils new file mode 120000 index 000000000000..043d62ea22f9 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils @@ -0,0 +1 @@ +../../../driver-adapter-utils \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg new file mode 120000 index 000000000000..59ded2fc841a --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg @@ -0,0 +1 @@ +../../../node_modules/.pnpm/@types+pg@8.10.2/node_modules/@types/pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg new file mode 120000 index 000000000000..5853d2642341 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg @@ -0,0 +1 @@ +../../node_modules/.pnpm/pg@8.11.3/node_modules/pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici new file mode 120000 index 000000000000..f0e268871dfc --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici @@ -0,0 +1 @@ +../../node_modules/.pnpm/undici@5.23.0/node_modules/undici \ No newline at end of file diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/package.json rename to query-engine/driver-adapters/connector-test-kit-executor/package.json diff 
--git a/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh b/query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh rename to query-engine/driver-adapters/connector-test-kit-executor/script/start_node.sh diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/JsonProtocol.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/JsonProtocol.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Library.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Library.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/QueryEngine.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/QueryEngine.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/engines/Transaction.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/engines/Transaction.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/index.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/index.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/jsonRpc.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/jsonRpc.ts diff --git a/query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts similarity index 100% rename from query-engine/driver-adapters/js/connector-test-kit-executor/src/qe.ts rename to query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts diff --git a/query-engine/driver-adapters/js/.gitignore b/query-engine/driver-adapters/js/.gitignore deleted file mode 100644 index e885963af278..000000000000 --- a/query-engine/driver-adapters/js/.gitignore +++ /dev/null @@ -1,44 +0,0 @@ -node_modules - -yarn-error.log -dist -build -tmp -pnpm-debug.log -sandbox -.DS_Store - -query-engine* -migration-engine* -schema-engine* -libquery_engine* -libquery-engine* -query_engine-windows.dll.node - -*tmp.db -dist/ -declaration/ - -*.tsbuildinfo -.prisma -.pnpm-store - -.vscode -!.vscode/launch.json.default 
-coverage - -.eslintcache - -.pnpm-debug.log - -.envrc - -esm -reproductions/* -!reproductions/basic-sqlite -!reproductions/tracing -!reproductions/pnpm-workspace.yaml - -dev.db -junit.xml -/output.txt diff --git a/query-engine/driver-adapters/js/.npmrc b/query-engine/driver-adapters/js/.npmrc deleted file mode 100644 index c87ec9b9e3d3..000000000000 --- a/query-engine/driver-adapters/js/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -git-checks=false -access=public diff --git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc deleted file mode 100644 index 8c60e1e54f37..000000000000 --- a/query-engine/driver-adapters/js/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -v20.5.1 diff --git a/query-engine/driver-adapters/js/.prettierrc.yml b/query-engine/driver-adapters/js/.prettierrc.yml deleted file mode 100644 index f0beb50a2167..000000000000 --- a/query-engine/driver-adapters/js/.prettierrc.yml +++ /dev/null @@ -1,5 +0,0 @@ -tabWidth: 2 -trailingComma: all -singleQuote: true -semi: false -printWidth: 120 diff --git a/query-engine/driver-adapters/js/README.md b/query-engine/driver-adapters/js/README.md deleted file mode 100644 index 926d6db2b0a8..000000000000 --- a/query-engine/driver-adapters/js/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# Prisma Driver Adapters - - - - - - - -
- -This TypeScript monorepo contains the following packages: -- `@prisma/driver-adapter-utils` - - Internal set of utilities and types for Prisma's driver adapters. -- `@prisma/adapter-neon` - - Prisma's Driver Adapter that wraps the `@neondatabase/serverless` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:neon"` -- `@prisma/adapter-planetscale` - - Prisma's Driver Adapter that wraps the `@planetscale/database` driver - - It uses `provider = "mysql"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:planetscale"` -- `@prisma/adapter-pg` - - Prisma's Driver Adapter that wraps the `pg` driver - - It uses `provider = "postgres"` - - It exposes debug logs via `DEBUG="prisma:driver-adapter:pg"` - -## Get Started - -We assume Node.js `v18.16.1`+ is installed. If not, run `nvm use` in the current directory. -This is very important to double-check if you have multiple versions installed, as PlanetScale requires either Node.js `v18.16.1`+ or a custom `fetch` function. - -Install `pnpm` via: - -```sh -npm i -g pnpm -``` - -## Development - -- Install Node.js dependencies via `pnpm i` -- Build and link TypeScript packages via `pnpm build` -- Publish packages to `npm` via `pnpm publish -r` diff --git a/query-engine/driver-adapters/js/adapter-libsql/.gitignore b/query-engine/driver-adapters/js/adapter-libsql/.gitignore deleted file mode 100644 index c370cb644f95..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.db diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md deleted file mode 100644 index 5ca415ea8ec9..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# @prisma/adapter-libsql - -Prisma driver adapter for Turso and libSQL. - -See https://prisma.io/turso for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. - -```sh -npm install @prisma/adapter-libsql -npm install @libsql/client -``` - -Make sure your Turso database connection string and authentication token is copied over to your `.env` file. The connection string will start with `libsql://`. - -```env -# .env -TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." -TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to setup migrations - -As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: - -1. 
Create a baseline migration - -```sh -npx prisma migrate diff --from-empty \ - --to-schema-datamodel prisma/schema.prisma \ - --script > baseline.sql -``` - -2. Apply the migration to your Turso database - -```sh -turso db shell turso-prisma < baseline.sql -``` - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the libSQL serverless database driver -3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { PrismaClient } from '@prisma/client'; -import { PrismaLibSQL } from '@prisma/adapter-libsql'; -import { createClient } from '@libsql/client'; - -// Setup -const connectionString = `${process.env.TURSO_DATABASE_URL}`; -const authToken = `${process.env.TURSO_AUTH_TOKEN}`; - -// Init prisma client -const libsql = createClient({ - url: connectionString, - authToken, -}); -const adapter = new PrismaLibSQL(libsql); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses a **single** remote Turso database. -You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. -We encourage you to create an issue if you find something missing or run into a bug. - -If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. 
diff --git a/query-engine/driver-adapters/js/adapter-libsql/package.json b/query-engine/driver-adapters/js/adapter-libsql/package.json deleted file mode 100644 index fbce33c98a29..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-libsql", - "version": "0.0.0", - "description": "Prisma's driver adapter for libSQL and Turso", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json", - "test": "node --loader tsx --test tests/*.test.mts" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alexey Orlenko ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "async-mutex": "0.4.0" - }, - "devDependencies": { - "@libsql/client": "0.3.5" - }, - "peerDependencies": { - "@libsql/client": "^0.3.5" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts b/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts deleted file mode 100644 index b2fa4b5b4095..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/conversion.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { ColumnTypeEnum, ColumnType, Debug } from '@prisma/driver-adapter-utils' -import { Row, Value } from '@libsql/client' -import { isArrayBuffer } from 'node:util/types' - -const debug = Debug('prisma:driver-adapter:libsql:conversion') - -// Mirrors sqlite/conversion.rs in quaint -function mapDeclType(declType: string): ColumnType | null { - switch (declType.toUpperCase()) { - case '': - return null - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'FLOAT': - return ColumnTypeEnum.Float - case 'DOUBLE': - case 'DOUBLE PRECISION': - case 'NUMERIC': - case 'REAL': - return ColumnTypeEnum.Double - case 'TINYINT': - case 'SMALLINT': - case 'MEDIUMINT': - case 'INT': - case 'INTEGER': - case 'SERIAL': - case 'INT2': - return ColumnTypeEnum.Int32 - case 'BIGINT': - case 'UNSIGNED BIG INT': - case 'INT8': - return ColumnTypeEnum.Int64 - case 'DATETIME': - case 'TIMESTAMP': - return ColumnTypeEnum.DateTime - case 'TIME': - return ColumnTypeEnum.Time - case 'DATE': - return ColumnTypeEnum.Date - case 'TEXT': - case 'CLOB': - case 'CHARACTER': - case 'VARCHAR': - case 'VARYING CHARACTER': - case 'NCHAR': - case 'NATIVE CHARACTER': - case 'NVARCHAR': - return ColumnTypeEnum.Text - case 'BLOB': - return ColumnTypeEnum.Bytes - case 'BOOLEAN': - return ColumnTypeEnum.Boolean - default: - debug('unknown decltype:', declType) - return null - } -} - -function mapDeclaredColumnTypes(columntTypes: string[]): [out: Array, empty: Set] { - const emptyIndices = new Set() - const result = columntTypes.map((typeName, index) => { - const mappedType = mapDeclType(typeName) - if (mappedType === null) { - emptyIndices.add(index) - } - return mappedType - }) - return [result, emptyIndices] -} - -export function getColumnTypes(declaredTypes: string[], rows: Row[]): ColumnType[] { - const [columnTypes, emptyIndices] = mapDeclaredColumnTypes(declaredTypes) - - if (emptyIndices.size === 0) { - return columnTypes as ColumnType[] - } - - columnLoop: for (const columnIndex of emptyIndices) { - // No declared column type in db schema, infer using first non-null value - for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { - const candidateValue = rows[rowIndex][columnIndex] - if (candidateValue 
!== null) { - columnTypes[columnIndex] = inferColumnType(candidateValue) - continue columnLoop - } - } - - // No non-null value found for this column, fall back to int32 to mimic what quaint does - columnTypes[columnIndex] = ColumnTypeEnum.Int32 - } - - return columnTypes as ColumnType[] -} - -function inferColumnType(value: NonNullable): ColumnType { - switch (typeof value) { - case 'string': - return ColumnTypeEnum.Text - case 'bigint': - return ColumnTypeEnum.Int64 - case 'boolean': - return ColumnTypeEnum.Boolean - case 'number': - return ColumnTypeEnum.UnknownNumber - case 'object': - return inferObjectType(value) - default: - throw new UnexpectedTypeError(value) - } -} - -function inferObjectType(value: {}): ColumnType { - if (isArrayBuffer(value)) { - return ColumnTypeEnum.Bytes - } - throw new UnexpectedTypeError(value) -} - -class UnexpectedTypeError extends Error { - name = 'UnexpectedTypeError' - constructor(value: unknown) { - const type = typeof value - const repr = type === 'object' ? JSON.stringify(value) : String(value) - super(`unexpected value of type ${type}: ${repr}`) - } -} - -export function mapRow(row: Row, columnTypes: ColumnType[]): unknown[] { - // `Row` doesn't have map, so we copy the array once and modify it in-place - // to avoid allocating and copying twice if we used `Array.from(row).map(...)`. - const result: unknown[] = Array.from(row) - - for (let i = 0; i < result.length; i++) { - const value = result[i] - - // Convert bigint to string as we can only use JSON-encodable types here - if (typeof value === 'bigint') { - result[i] = value.toString() - } - - // Convert array buffers to arrays of bytes. - // Base64 would've been more efficient but would collide with the existing - // logic that treats string values of type Bytes as raw UTF-8 bytes that was - // implemented for other adapters. - if (isArrayBuffer(value)) { - result[i] = Array.from(new Uint8Array(value)) - } - - // If an integer is required and the current number isn't one, - // discard the fractional part. 
- if ( - typeof value === 'number' && - (columnTypes[i] === ColumnTypeEnum.Int32 || columnTypes[i] === ColumnTypeEnum.Int64) && - !Number.isInteger(value) - ) { - result[i] = Math.trunc(value) - } - } - - return result -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts b/query-engine/driver-adapters/js/adapter-libsql/src/index.ts deleted file mode 100644 index 04a95cc4cfcd..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaLibSQL } from './libsql' diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts deleted file mode 100644 index 6528c8f44a8a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import type { - InStatement, - Client as LibSqlClientRaw, - Transaction as LibSqlTransactionRaw, - ResultSet as LibSqlResultSet, -} from '@libsql/client' -import { Mutex } from 'async-mutex' -import { getColumnTypes, mapRow } from './conversion' - -const debug = Debug('prisma:driver-adapter:libsql') - -type StdClient = LibSqlClientRaw -type TransactionClient = LibSqlTransactionRaw - -const LOCK_TAG = Symbol() - -class LibSqlQueryable implements Queryable { - readonly flavour = 'sqlite'; - - [LOCK_TAG] = new Mutex() - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - - return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { - const columnTypes = getColumnTypes(declaredColumnTypes, rows) - - return { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const release = await this[LOCK_TAG].acquire() - try { - const result = await this.client.execute(query as InStatement) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - const rawCode = error['rawCode'] ?? 
e.cause?.['rawCode'] - if (typeof rawCode === 'number') { - return err({ - kind: 'Sqlite', - extendedCode: rawCode, - message: error.message, - }) - } - throw error - } finally { - release() - } - } -} - -class LibSqlTransaction extends LibSqlQueryable implements Transaction { - finished = false - - constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - - try { - await this.client.commit() - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - - try { - await this.client.rollback() - } catch (error) { - debug('error in rollback:', error) - } finally { - this.unlockParent() - } - - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.finished = true - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaLibSQL extends LibSqlQueryable implements DriverAdapter { - constructor(client: StdClient) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const release = await this[LOCK_TAG].acquire() - - try { - const tx = await this.client.transaction('deferred') - return ok(new LibSqlTransaction(tx, options, release)) - } catch (e) { - // note: we only release the lock if creating the transaction fails, it must stay locked otherwise, - // hence `catch` and rethrowing the error and not `finally`. - release() - throw e - } - } - - async close(): Promise> { - await this[LOCK_TAG].acquire() - this.client.close() - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts b/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts deleted file mode 100644 index f7f1b474a300..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tests/types.test.mts +++ /dev/null @@ -1,151 +0,0 @@ -import assert from 'node:assert/strict' -import { describe, it } from 'node:test' -import { Config, createClient } from '@libsql/client' -import { PrismaLibSQL } from '../dist/index.js' -import { ColumnTypeEnum } from '@jkomyno/prisma-driver-adapter-utils' - -function connect(config?: Partial): PrismaLibSQL { - const client = createClient({ url: 'file:test.db', ...config }) - return new PrismaLibSQL(client) -} - -it('checks declared types', async () => { - const client = connect() - - await client.executeRaw({ - sql: ` - DROP TABLE IF EXISTS types; - `, - args: [], - }) - - await client.executeRaw({ - sql: ` - CREATE TABLE types ( - id INTEGER PRIMARY KEY, - real REAL, - bigint BIGINT, - date DATETIME, - text TEXT, - blob BLOB - ) - `, - args: [], - }) - - const result = await client.queryRaw({ - sql: ` - SELECT * FROM types - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ - ColumnTypeEnum.Int32, - ColumnTypeEnum.Double, - ColumnTypeEnum.Int64, - ColumnTypeEnum.DateTime, - ColumnTypeEnum.Text, - ColumnTypeEnum.Bytes, - ]) -}) - -it('infers types when sqlite decltype is not available', async () => { - const client = connect() - - const result = await client.queryRaw({ - sql: ` - SELECT 1 as first, 'test' as second - `, - args: [], - }) - - assert(result.ok) - assert.deepEqual(result.value.columnTypes, [ColumnTypeEnum.Int64, 
ColumnTypeEnum.Text]) -}) - -describe('int64 with different intMode', () => { - const N = 2n ** 63n - 1n - - it('correctly infers int64 with intMode=number for safe JS integers', async () => { - const client = connect({ intMode: 'number' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [Number.MAX_SAFE_INTEGER], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], Number.MAX_SAFE_INTEGER) - }) - - it("doesn't support very big int64 with intMode=number", async () => { - const client = connect({ intMode: 'number' }) - - assert.rejects( - client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }), - ) - }) - - it('correctly infers int64 with intMode=bigint', async () => { - const client = connect({ intMode: 'bigint' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - - // bigints are converted to strings because we can't currently pass a bigint - // to rust due to a napi.rs limitation - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it('correctly infers int64 with intMode=string when we have decltype', async () => { - const client = connect({ intMode: 'string' }) - - await client.executeRaw({ - sql: `DROP TABLE IF EXISTS test`, - args: [], - }) - - await client.executeRaw({ - sql: `CREATE TABLE test (int64 BIGINT)`, - args: [], - }) - - await client.executeRaw({ - sql: `INSERT INTO test (int64) VALUES (?)`, - args: [N], - }) - - const result = await client.queryRaw({ - sql: `SELECT int64 FROM test`, - args: [], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Int64) - assert.equal(result.value.rows[0][0], N.toString()) - }) - - it("can't infer int64 with intMode=string without schema", async () => { - const client = connect({ intMode: 'string' }) - - const result = await client.queryRaw({ - sql: `SELECT ?`, - args: [N], - }) - - assert(result.ok) - assert.equal(result.value.columnTypes[0], ColumnTypeEnum.Text) - assert.equal(result.value.rows[0][0], N.toString()) - }) -}) diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json b/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-libsql/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md deleted file mode 100644 index f36f44c6bca4..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-neon - -Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. 
- -## How to install - -After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. - -```sh -npm install @prisma/adapter-neon -npm install @neondatabase/serverless -npm install ws -``` - -Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. - -```env -# .env -DATABASE_URL="postgres://..." -``` - -Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgresql" - url = env("DATABASE_URL") -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the Neon serverless database driver -3. Instantiate the Prisma Neon adapter with the Neon serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { Pool, neonConfig } from '@neondatabase/serverless'; -import { PrismaNeon } from '@prisma/adapter-neon'; -import { PrismaClient } from '@prisma/client'; -import ws from 'ws'; - -// Setup -neonConfig.webSocketConstructor = ws; -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const pool = new Pool({ connectionString }); -const adapter = new PrismaNeon(pool); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development. 
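
As a minimal sketch of the "use Prisma Client as normal" step above: the snippet below assumes the `prisma` instance configured in the previous example and a hypothetical `User` model in `schema.prisma` (neither is defined by this patch), and uses the standard `prisma.$disconnect()` call to release the adapter's connection pool when done.

```typescript
// Minimal usage sketch: assumes the `prisma` client created above (PrismaNeon adapter
// over a Neon WebSocket pool) and a hypothetical `User` model in schema.prisma.
async function main() {
  // Runs through the Neon serverless driver via the driver adapter.
  const users = await prisma.user.findMany({ take: 10 })
  console.log(users)
}

main()
  .catch((e) => {
    console.error(e)
    process.exitCode = 1
  })
  .finally(async () => {
    // Release the WebSocket-backed pool held by the adapter.
    await prisma.$disconnect()
  })
```
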
diff --git a/query-engine/driver-adapters/js/adapter-neon/package.json b/query-engine/driver-adapters/js/adapter-neon/package.json deleted file mode 100644 index 02005a13572f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "@prisma/adapter-neon", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@neondatabase/serverless\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "@neondatabase/serverless": "^0.6.0" - }, - "peerDependencies": { - "@neondatabase/serverless": "^0.6.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts deleted file mode 100644 index 78f285240599..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from '@neondatabase/serverless' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-neon/src/index.ts b/query-engine/driver-adapters/js/adapter-neon/src/index.ts deleted file mode 100644 index f160d413ade0..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaNeon, PrismaNeonHTTP } from './neon' diff --git a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts b/query-engine/driver-adapters/js/adapter-neon/src/neon.ts deleted file mode 100644 index e8fe40ada22f..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/src/neon.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type neon from '@neondatabase/serverless' -import { Debug, ok, err } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:neon') - -type ARRAY_MODE_ENABLED = true - -type PerformIOResult = neon.QueryResult | neon.FullQueryResults - -/** - * Base class for http client, ws client and ws transaction - */ -abstract class NeonQueryable implements Queryable { - readonly flavour = 'postgres' - - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map((r) => r.rowCount ?? 
0) - } - - abstract performIO(query: Query): Promise> -} - -/** - * Base class for WS-based queryables: top-level client and transaction - */ -class NeonWsQueryable extends NeonQueryable { - constructor(protected client: ClientT) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - return ok(await this.client.query({ text: sql, values, rowMode: 'array' })) - } catch (e) { - debug('Error in performIO: %O', e) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw e - } - } -} - -class NeonTransaction extends NeonWsQueryable implements Transaction { - finished = false - - constructor(client: neon.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return Promise.resolve(ok(undefined)) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaNeon extends NeonWsQueryable implements DriverAdapter { - private isRunning = true - - constructor(pool: neon.Pool) { - super(pool) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new NeonTransaction(connection, options)) - } - - async close() { - if (this.isRunning) { - await this.client.end() - this.isRunning = false - } - return ok(undefined) - } -} - -export class PrismaNeonHTTP extends NeonQueryable implements DriverAdapter { - constructor(private client: neon.NeonQueryFunction) { - super() - } - - override async performIO(query: Query): Promise> { - const { sql, args: values } = query - return ok( - await this.client(sql, values, { - arrayMode: true, - fullResults: true, - }), - ) - } - - startTransaction(): Promise> { - return Promise.reject(new Error('Transactions are not supported in HTTP mode')) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json b/query-engine/driver-adapters/js/adapter-neon/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-neon/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/adapter-pg/README.md b/query-engine/driver-adapters/js/adapter-pg/README.md deleted file mode 100644 index b8463742e25c..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/adapter-pg - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/adapter-pg/package.json 
b/query-engine/driver-adapters/js/adapter-pg/package.json deleted file mode 100644 index 7514569c562a..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@prisma/adapter-pg", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"pg\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Tom Houlé ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*", - "postgres-array": "^3.0.2" - }, - "devDependencies": { - "pg": "^8.11.3", - "@types/pg": "^8.10.2" - }, - "peerDependencies": { - "pg": "^8.11.3" - } -} diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts deleted file mode 100644 index c26b13877927..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { ColumnTypeEnum, type ColumnType, JsonNullMarker } from '@prisma/driver-adapter-utils' -import { types } from 'pg' -import { parse as parseArray } from 'postgres-array' - -const ScalarColumnType = types.builtins - -/** - * PostgreSQL array column types (not defined in ScalarColumnType). - * - * See the semantics of each of this code in: - * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat - */ -const ArrayColumnType = { - BIT_ARRAY: 1561, - BOOL_ARRAY: 1000, - BYTEA_ARRAY: 1001, - BPCHAR_ARRAY: 1014, - CHAR_ARRAY: 1002, - CIDR_ARRAY: 651, - DATE_ARRAY: 1182, - FLOAT4_ARRAY: 1021, - FLOAT8_ARRAY: 1022, - INET_ARRAY: 1041, - INT2_ARRAY: 1005, - INT4_ARRAY: 1007, - INT8_ARRAY: 1016, - JSONB_ARRAY: 3807, - JSON_ARRAY: 199, - MONEY_ARRAY: 791, - NUMERIC_ARRAY: 1231, - OID_ARRAY: 1028, - TEXT_ARRAY: 1009, - TIMESTAMP_ARRAY: 1115, - TIME_ARRAY: 1183, - UUID_ARRAY: 2951, - VARBIT_ARRAY: 1563, - VARCHAR_ARRAY: 1015, - XML_ARRAY: 143, -} - -/** - * This is a simplification of quaint's value inference logic. Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. 
- */ -export function fieldToColumnType(fieldTypeId: number): ColumnType { - switch (fieldTypeId) { - case ScalarColumnType['INT2']: - case ScalarColumnType['INT4']: - return ColumnTypeEnum.Int32 - case ScalarColumnType['INT8']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['FLOAT4']: - return ColumnTypeEnum.Float - case ScalarColumnType['FLOAT8']: - return ColumnTypeEnum.Double - case ScalarColumnType['BOOL']: - return ColumnTypeEnum.Boolean - case ScalarColumnType['DATE']: - return ColumnTypeEnum.Date - case ScalarColumnType['TIME']: - case ScalarColumnType['TIMETZ']: - return ColumnTypeEnum.Time - case ScalarColumnType['TIMESTAMP']: - case ScalarColumnType['TIMESTAMPTZ']: - return ColumnTypeEnum.DateTime - case ScalarColumnType['NUMERIC']: - case ScalarColumnType['MONEY']: - return ColumnTypeEnum.Numeric - case ScalarColumnType['JSON']: - case ScalarColumnType['JSONB']: - return ColumnTypeEnum.Json - case ScalarColumnType['UUID']: - return ColumnTypeEnum.Uuid - case ScalarColumnType['OID']: - return ColumnTypeEnum.Int64 - case ScalarColumnType['BPCHAR']: - case ScalarColumnType['TEXT']: - case ScalarColumnType['VARCHAR']: - case ScalarColumnType['BIT']: - case ScalarColumnType['VARBIT']: - case ScalarColumnType['INET']: - case ScalarColumnType['CIDR']: - case ScalarColumnType['XML']: - return ColumnTypeEnum.Text - case ScalarColumnType['BYTEA']: - return ColumnTypeEnum.Bytes - case ArrayColumnType.INT2_ARRAY: - case ArrayColumnType.INT4_ARRAY: - return ColumnTypeEnum.Int32Array - case ArrayColumnType.FLOAT4_ARRAY: - return ColumnTypeEnum.FloatArray - case ArrayColumnType.FLOAT8_ARRAY: - return ColumnTypeEnum.DoubleArray - case ArrayColumnType.NUMERIC_ARRAY: - case ArrayColumnType.MONEY_ARRAY: - return ColumnTypeEnum.NumericArray - case ArrayColumnType.BOOL_ARRAY: - return ColumnTypeEnum.BooleanArray - case ArrayColumnType.CHAR_ARRAY: - return ColumnTypeEnum.CharArray - case ArrayColumnType.BPCHAR_ARRAY: - case ArrayColumnType.TEXT_ARRAY: - case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.VARBIT_ARRAY: - case ArrayColumnType.BIT_ARRAY: - case ArrayColumnType.INET_ARRAY: - case ArrayColumnType.CIDR_ARRAY: - case ArrayColumnType.XML_ARRAY: - return ColumnTypeEnum.TextArray - case ArrayColumnType.DATE_ARRAY: - return ColumnTypeEnum.DateArray - case ArrayColumnType.TIME_ARRAY: - return ColumnTypeEnum.TimeArray - case ArrayColumnType.TIMESTAMP_ARRAY: - return ColumnTypeEnum.DateTimeArray - case ArrayColumnType.JSON_ARRAY: - case ArrayColumnType.JSONB_ARRAY: - return ColumnTypeEnum.JsonArray - case ArrayColumnType.BYTEA_ARRAY: - return ColumnTypeEnum.BytesArray - case ArrayColumnType.UUID_ARRAY: - return ColumnTypeEnum.UuidArray - case ArrayColumnType.INT8_ARRAY: - case ArrayColumnType.OID_ARRAY: - return ColumnTypeEnum.Int64Array - default: - if (fieldTypeId >= 10000) { - // Postgres Custom Types - return ColumnTypeEnum.Enum - } - throw new Error(`Unsupported column type: ${fieldTypeId}`) - } -} - -function normalize_array(element_normalizer: (string) => string): (string) => string[] { - return (str) => parseArray(str, element_normalizer) -} - -/****************************/ -/* Time-related data-types */ -/****************************/ - -function normalize_numeric(numeric: string): string { - return numeric -} - -types.setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric) -types.setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric)) - -/****************************/ -/* Time-related data-types */ -/****************************/ - - -function 
normalize_date(date: string): string { - return date -} - -function normalize_timestamp(time: string): string { - return time -} - -function normalize_timestampz(time: string): string { - return time.split("+")[0] -} - -/* - * TIME, TIMETZ, TIME_ARRAY - converts value (or value elements) to a string in the format HH:mm:ss.f - */ - -function normalize_time(time: string): string { - return time -} - -function normalize_timez(time: string): string { - // Although it might be controversial, UTC is assumed in consistency with the behavior of rust postgres driver - // in quaint. See quaint/src/connector/postgres/conversion.rs - return time.split("+")[0] -} - -types.setTypeParser(ScalarColumnType.TIME, normalize_time) -types.setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time)) -types.setTypeParser(ScalarColumnType.TIMETZ, normalize_timez) - -/* - * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD - */ - -types.setTypeParser(ScalarColumnType.DATE, normalize_date) -types.setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date)) - - -/* - * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format - * ex: 1996-12-19T16:39:57-08:00 - */ -types.setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp) -types.setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp)) -types.setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz) - -/******************/ -/* Money handling */ -/******************/ - -function normalize_money(money: string): string { - return money.slice(1) -} - -types.setTypeParser(ScalarColumnType.MONEY, normalize_money) -types.setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money)) - - -/*****************/ -/* JSON handling */ -/*****************/ - -/** - * JsonNull are stored in JSON strings as the string "null", distinguishable from - * the `null` value which is used by the driver to represent the database NULL. - * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value - * and this will lead to serde_json::Value::Null in Rust, which will be interpreted - * as DbNull. - * - * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and - * convert it to QuaintValue::Json(Some(Null)). - */ -function toJson(json: string): unknown { - return (json === 'null') ? JsonNullMarker : JSON.parse(json) -} - - -types.setTypeParser(ScalarColumnType.JSONB, toJson) -types.setTypeParser(ScalarColumnType.JSON, toJson) - -/************************/ -/* Binary data handling */ -/************************/ - -/** - * TODO: - * 1. Check if using base64 would be more efficient than this encoding. - * 2. Consider the possibility of eliminating re-encoding altogether - * and passing bytea hex format to the engine if that can be aligned - * with other adapter flavours. - */ -function encodeBuffer(buffer: Buffer) { - return Array.from(new Uint8Array(buffer)) -} - -/* - * BYTEA - arbitrary raw binary strings - */ - -const parsePgBytes = types.getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer -/** - * Convert bytes to a JSON-encodable representation since we can't - * currently send a parsed Buffer or ArrayBuffer across JS to Rust - * boundary. 
- */ -function convertBytes(serializedBytes: string): number[] { - const buffer = parsePgBytes(serializedBytes) - return encodeBuffer(buffer) -} - -types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) - -/* - * BYTEA_ARRAY - arrays of arbitrary raw binary strings - */ - -const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] - -types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { - const buffers = parseBytesArray(serializedBytesArray) - return buffers.map((buf) => buf ? encodeBuffer(buf) : null) -}) - -/* BIT_ARRAY, VARBIT_ARRAY */ - -function normalizeBit(bit: string): string { - return bit -} - -types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) -types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/index.ts b/query-engine/driver-adapters/js/adapter-pg/src/index.ts deleted file mode 100644 index f8e51ac2685b..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPg } from './pg' diff --git a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts b/query-engine/driver-adapters/js/adapter-pg/src/pg.ts deleted file mode 100644 index c34050778c39..000000000000 --- a/query-engine/driver-adapters/js/adapter-pg/src/pg.ts +++ /dev/null @@ -1,138 +0,0 @@ -import type pg from 'pg' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - Query, - Queryable, - Result, - ResultSet, - Transaction, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { fieldToColumnType } from './conversion' - -const debug = Debug('prisma:driver-adapter:pg') - -type StdClient = pg.Pool -type TransactionClient = pg.PoolClient - -class PgQueryable implements Queryable { - readonly flavour = 'postgres' - - constructor(protected readonly client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, rows }) => { - const columns = fields.map((field) => field.name) - const columnTypes = fields.map((field) => fieldToColumnType(field.dataTypeID)) - - return { - columnNames: columns, - columnTypes, - rows, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - // Note: `rowsAffected` can sometimes be null (e.g., when executing `"BEGIN"`) - return (await this.performIO(query)).map(({ rowCount: rowsAffected }) => rowsAffected ?? 0) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. 
- */ - private async performIO(query: Query): Promise>> { - const { sql, args: values } = query - - try { - const result = await this.client.query({ text: sql, values, rowMode: 'array' }) - return ok(result) - } catch (e) { - const error = e as Error - debug('Error in performIO: %O', error) - if (e && e.code) { - return err({ - kind: 'Postgres', - code: e.code, - severity: e.severity, - message: e.message, - detail: e.detail, - column: e.column, - hint: e.hint, - }) - } - throw error - } - } -} - -class PgTransaction extends PgQueryable implements Transaction { - finished = false - - constructor(client: pg.PoolClient, readonly options: TransactionOptions) { - super(client) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.client.release() - return ok(undefined) - } - - dispose(): Result { - if (!this.finished) { - this.client.release() - } - return ok(undefined) - } -} - -export class PrismaPg extends PgQueryable implements DriverAdapter { - constructor(client: pg.Pool) { - super(client) - } - - async startTransaction(): Promise> { - const options: TransactionOptions = { - usePhantomQuery: false, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - const connection = await this.client.connect() - return ok(new PgTransaction(connection, options)) - } - - async close() { - return ok(undefined) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md deleted file mode 100644 index a4cdc132036a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# @prisma/adapter-planetscale - -Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). - -See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. - -The following usage tutorial is valid for Prisma 5.4.2 and later versions. - -## How to install - -After [getting started with PlanetScale](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver. - -```sh -npm install @prisma/adapter-planetscale -npm install @planetscale/database -npm install undici -``` - -Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`. - -```env -# .env -DATABASE_URL="mysql://..." -``` - -You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. - -```prisma -// schema.prisma -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") - relationMode = "prisma" -} -``` - -Now run `npx prisma generate` to re-generate Prisma Client. - -## How to use - -In TypeScript, you will need to: - -1. Import packages -2. Set up the PlanetScale serverless database driver -3. 
Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver -4. Pass the driver adapter to the Prisma Client instance - -```typescript -// Import needed packages -import { connect } from '@planetscale/database'; -import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; -import { PrismaClient } from '@prisma/client'; -import { fetch as undiciFetch } from 'undici'; - -// Setup -const connectionString = `${process.env.DATABASE_URL}`; - -// Init prisma client -const connection = connect({ url: connectionString, fetch: undiciFetch }); -const adapter = new PrismaPlanetScale(connection); -const prisma = new PrismaClient({ adapter }); - -// Use Prisma Client as normal -``` - -Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-planetscale/package.json b/query-engine/driver-adapters/js/adapter-planetscale/package.json deleted file mode 100644 index 59d59704ab50..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@prisma/adapter-planetscale", - "version": "0.0.0", - "description": "Prisma's driver adapter for \"@planetscale/database\"", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "@prisma/driver-adapter-utils": "workspace:*" - }, - "devDependencies": { - "@planetscale/database": "^1.11.0" - }, - "peerDependencies": { - "@planetscale/database": "^1.11.0" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts deleted file mode 100644 index f6cf8563dc24..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/conversion.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { ColumnTypeEnum, type ColumnType } from '@prisma/driver-adapter-utils' - -// See: https://github.com/planetscale/vitess-types/blob/06235e372d2050b4c0fff49972df8111e696c564/src/vitess/query/v16/query.proto#L108-L218 -export type PlanetScaleColumnType - = 'NULL' - | 'INT8' - | 'UINT8' - | 'INT16' - | 'UINT16' - | 'INT24' - | 'UINT24' - | 'INT32' - | 'UINT32' - | 'INT64' - | 'UINT64' - | 'FLOAT32' - | 'FLOAT64' - | 'TIMESTAMP' - | 'DATE' - | 'TIME' - | 'DATETIME' - | 'YEAR' - | 'DECIMAL' - | 'TEXT' - | 'BLOB' - | 'VARCHAR' - | 'VARBINARY' - | 'CHAR' - | 'BINARY' - | 'BIT' - | 'ENUM' - | 'SET' // unsupported - | 'TUPLE' // unsupported - | 'GEOMETRY' - | 'JSON' - | 'EXPRESSION' // unsupported - | 'HEXNUM' - | 'HEXVAL' - | 'BITNUM' - -/** - * This is a simplification of quaint's value inference logic. 
Take a look at quaint's conversion.rs - * module to see how other attributes of the field packet such as the field length are used to infer - * the correct quaint::Value variant. - */ -export function fieldToColumnType(field: PlanetScaleColumnType): ColumnType { - switch (field) { - case 'INT8': - case 'UINT8': - case 'INT16': - case 'UINT16': - case 'INT24': - case 'UINT24': - case 'INT32': - case 'UINT32': - case 'YEAR': - return ColumnTypeEnum.Int32 - case 'INT64': - case 'UINT64': - return ColumnTypeEnum.Int64 - case 'FLOAT32': - return ColumnTypeEnum.Float - case 'FLOAT64': - return ColumnTypeEnum.Double - case 'TIMESTAMP': - case 'DATETIME': - return ColumnTypeEnum.DateTime - case 'DATE': - return ColumnTypeEnum.Date - case 'TIME': - return ColumnTypeEnum.Time - case 'DECIMAL': - return ColumnTypeEnum.Numeric - case 'CHAR': - return ColumnTypeEnum.Char - case 'TEXT': - case 'VARCHAR': - return ColumnTypeEnum.Text - case 'ENUM': - return ColumnTypeEnum.Enum - case 'JSON': - return ColumnTypeEnum.Json - case 'BLOB': - case 'BINARY': - case 'VARBINARY': - case 'BIT': - case 'BITNUM': - case 'HEXNUM': - case 'HEXVAL': - case 'GEOMETRY': - return ColumnTypeEnum.Bytes - case 'NULL': - // Fall back to Int32 for consistency with quaint. - return ColumnTypeEnum.Int32 - default: - throw new Error(`Unsupported column type: ${field}`) - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts deleted file mode 100644 index 013409c8424f..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/deferred.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type Deferred = { - resolve(value: T | PromiseLike): void; - reject(reason: unknown): void; -} - - -export function createDeferred(): [Deferred, Promise] { - const deferred = {} as Deferred - return [deferred, new Promise((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - })] -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts deleted file mode 100644 index 5e8add856fbb..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { PrismaPlanetScale } from './planetscale' diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts deleted file mode 100644 index 5a52851112b2..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ /dev/null @@ -1,181 +0,0 @@ -import type planetScale from '@planetscale/database' -import { Debug, err, ok } from '@prisma/driver-adapter-utils' -import type { - DriverAdapter, - ResultSet, - Query, - Queryable, - Transaction, - Result, - TransactionOptions, -} from '@prisma/driver-adapter-utils' -import { type PlanetScaleColumnType, fieldToColumnType } from './conversion' -import { createDeferred, Deferred } from './deferred' - -const debug = Debug('prisma:driver-adapter:planetscale') - -class RollbackError extends Error { - constructor() { - super('ROLLBACK') - this.name = 'RollbackError' - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, RollbackError) - } - } -} - -class PlanetScaleQueryable implements Queryable { - readonly flavour = 'mysql' - constructor(protected client: ClientT) {} - - /** - * Execute a query given as SQL, interpolating the given 
parameters. - */ - async queryRaw(query: Query): Promise> { - const tag = '[js::query_raw]' - debug(`${tag} %O`, query) - - const ioResult = await this.performIO(query) - return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { - const columns = fields.map((field) => field.name) - return { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - }) - } - - /** - * Execute a query given as SQL, interpolating the given parameters and - * returning the number of affected rows. - * Note: Queryable expects a u64, but napi.rs only supports u32. - */ - async executeRaw(query: Query): Promise> { - const tag = '[js::execute_raw]' - debug(`${tag} %O`, query) - - return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected) - } - - /** - * Run a query against the database, returning the result set. - * Should the query fail due to a connection error, the connection is - * marked as unhealthy. - */ - private async performIO(query: Query): Promise> { - const { sql, args: values } = query - - try { - const result = await this.client.execute(sql, values, { - as: 'array', - }) - return ok(result) - } catch (e) { - const error = e as Error - if (error.name === 'DatabaseError') { - const parsed = parseErrorMessage(error.message) - if (parsed) { - return err({ - kind: 'Mysql', - ...parsed, - }) - } - } - debug('Error in performIO: %O', error) - throw error - } - } -} - -function parseErrorMessage(message: string) { - const match = message.match( - /target: (?:.+?) vttablet: (?.+?) \(errno (?\d+)\) \(sqlstate (?.+?)\)/, - ) - - if (!match || !match.groups) { - return undefined - } - return { - code: Number(match.groups.code), - message: match.groups.message, - state: match.groups.state, - } -} - -class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { - finished = false - - constructor( - tx: planetScale.Transaction, - readonly options: TransactionOptions, - private txDeferred: Deferred, - private txResultPromise: Promise, - ) { - super(tx) - } - - async commit(): Promise> { - debug(`[js::commit]`) - - this.finished = true - this.txDeferred.resolve() - return Promise.resolve(ok(await this.txResultPromise)) - } - - async rollback(): Promise> { - debug(`[js::rollback]`) - - this.finished = true - this.txDeferred.reject(new RollbackError()) - return Promise.resolve(ok(await this.txResultPromise)) - } - - dispose(): Result { - if (!this.finished) { - this.rollback().catch(console.error) - } - return ok(undefined) - } -} - -export class PrismaPlanetScale extends PlanetScaleQueryable implements DriverAdapter { - constructor(client: planetScale.Connection) { - super(client) - } - - async startTransaction() { - const options: TransactionOptions = { - usePhantomQuery: true, - } - - const tag = '[js::startTransaction]' - debug(`${tag} options: %O`, options) - - return new Promise>((resolve, reject) => { - const txResultPromise = this.client - .transaction(async (tx) => { - const [txDeferred, deferredPromise] = createDeferred() - const txWrapper = new PlanetScaleTransaction(tx, options, txDeferred, txResultPromise) - - resolve(ok(txWrapper)) - return deferredPromise - }) - .catch((error) => { - // Rollback error is ignored (so that tx.rollback() won't crash) - // any other error is legit and is re-thrown - if (!(error instanceof RollbackError)) { - return reject(error) - } - - return undefined - }) - }) - } - - async close() { - return ok(undefined) - 
} -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json deleted file mode 100644 index 28c56f6c3a9a..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration" - } -} diff --git a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json b/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/adapter-planetscale/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/README.md b/query-engine/driver-adapters/js/driver-adapter-utils/README.md deleted file mode 100644 index 78938e802bd3..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# @prisma/driver-adapters-utils - -**INTERNAL PACKAGE, DO NOT USE** diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/package.json b/query-engine/driver-adapters/js/driver-adapter-utils/package.json deleted file mode 100644 index 64301a7a5533..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@prisma/driver-adapter-utils", - "version": "0.0.0", - "description": "Internal set of utilities and types for Prisma's driver adapters.", - "main": "dist/index.js", - "module": "dist/index.mjs", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", - "lint": "tsc -p ./tsconfig.build.json" - }, - "files": [ - "dist", - "README.md" - ], - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": false, - "dependencies": { - "debug": "^4.3.4" - }, - "devDependencies": { - "@types/debug": "^4.1.8" - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts deleted file mode 100644 index 1e3aa36210cf..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/binder.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { Result, err, ok } from './result' -import type { ErrorCapturingDriverAdapter, DriverAdapter, Transaction, ErrorRegistry, ErrorRecord } from './types' - -class ErrorRegistryInternal implements ErrorRegistry { - private registeredErrors: ErrorRecord[] = [] - - consumeError(id: number): ErrorRecord | undefined { - return this.registeredErrors[id] - } - - registerNewError(error: unknown) { - let i = 0 - while (this.registeredErrors[i] !== undefined) { - i++ - } - this.registeredErrors[i] = { error } - return i - } -} - -// *.bind(adapter) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. 
-export const bindAdapter = (adapter: DriverAdapter): ErrorCapturingDriverAdapter => { - const errorRegistry = new ErrorRegistryInternal() - - const startTransaction = wrapAsync(errorRegistry, adapter.startTransaction.bind(adapter)) - return { - errorRegistry, - queryRaw: wrapAsync(errorRegistry, adapter.queryRaw.bind(adapter)), - executeRaw: wrapAsync(errorRegistry, adapter.executeRaw.bind(adapter)), - flavour: adapter.flavour, - startTransaction: async (...args) => { - const result = await startTransaction(...args) - return result.map((tx) => bindTransaction(errorRegistry, tx)) - }, - close: wrapAsync(errorRegistry, adapter.close.bind(adapter)), - } -} - -// *.bind(transaction) is required to preserve the `this` context of functions whose -// execution is delegated to napi.rs. -const bindTransaction = (errorRegistry: ErrorRegistryInternal, transaction: Transaction): Transaction => { - return { - flavour: transaction.flavour, - options: transaction.options, - queryRaw: wrapAsync(errorRegistry, transaction.queryRaw.bind(transaction)), - executeRaw: wrapAsync(errorRegistry, transaction.executeRaw.bind(transaction)), - commit: wrapAsync(errorRegistry, transaction.commit.bind(transaction)), - rollback: wrapAsync(errorRegistry, transaction.rollback.bind(transaction)), - dispose: wrapSync(errorRegistry, transaction.dispose.bind(transaction)), - } -} - -function wrapAsync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Promise>, -): (...args: A) => Promise> { - return async (...args) => { - try { - return await fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return err({ kind: 'GenericJs', id }) - } - } -} - -function wrapSync( - registry: ErrorRegistryInternal, - fn: (...args: A) => Result, -): (...args: A) => Result { - return (...args) => { - try { - return fn(...args) - } catch (error) { - const id = registry.registerNewError(error) - return err({ kind: 'GenericJs', id }) - } - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts deleted file mode 100644 index 5ddc7f20b390..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/const.ts +++ /dev/null @@ -1,48 +0,0 @@ -// Same order as in rust driver-adapters' `ColumnType`. -// Note: exporting const enums causes lots of problems with bundlers, so we emulate -// them via regular dictionaries. -// See: https://hackmd.io/@dzearing/Sk3xV0cLs -export const ColumnTypeEnum = { - // Scalars - Int32: 0, - Int64: 1, - Float: 2, - Double: 3, - Numeric: 4, - Boolean: 5, - Char: 6, - Text: 7, - Date: 8, - Time: 9, - DateTime: 10, - Json: 11, - Enum: 12, - Bytes: 13, - Set: 14, - Uuid: 15, - - // Arrays - Int32Array: 64, - Int64Array: 65, - FloatArray: 66, - DoubleArray: 67, - NumericArray: 68, - BooleanArray: 69, - CharArray: 70, - TextArray: 71, - DateArray: 72, - TimeArray: 73, - DateTimeArray: 74, - JsonArray: 75, - EnumArray: 76, - BytesArray: 77, - UuidArray: 78, - - // Custom - UnknownNumber: 128, -} as const - -// This string value paired with `ColumnType.Json` will be treated as JSON `null` -// when convering to a quaint value. This is to work around JS/JSON null values -// already being used to represent database NULLs. 
-export const JsonNullMarker = '$__prisma_null' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts deleted file mode 100644 index e0a1fe380fa2..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/debug.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { debug as Debug } from 'debug' - -export { Debug } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts deleted file mode 100644 index e7c13be99966..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { bindAdapter } from './binder' -export { ColumnTypeEnum, JsonNullMarker } from './const' -export { Debug } from './debug' -export { ok, err, type Result } from './result' -export type * from './types' diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts deleted file mode 100644 index 5af95db68671..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/result.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Error } from './types' -export type Result<T> = { - // common methods - map<U>(fn: (value: T) => U): Result<U> - flatMap<U>(fn: (value: T) => Result<U>): Result<U> -} & ( - | { - readonly ok: true - readonly value: T - } - | { - readonly ok: false - readonly error: Error - } -) - -export function ok<T>(value: T): Result<T> { - return { - ok: true, - value, - map(fn) { - return ok(fn(value)) - }, - flatMap(fn) { - return fn(value) - }, - } -} - -export function err<T>(error: Error): Result<T> { - return { - ok: false, - error, - map() { - return err(error) - }, - flatMap() { - return err(error) - }, - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts deleted file mode 100644 index 92019f81824b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { ColumnTypeEnum } from './const' -import { Result } from './result' - -export type ColumnType = (typeof ColumnTypeEnum)[keyof typeof ColumnTypeEnum] - -export interface ResultSet { - /** - * List of column types appearing in a database query, in the same order as `columnNames`. - * They are used within the Query Engine to convert values from JS to Quaint values. - */ - columnTypes: Array<ColumnType> - - /** - * List of column names appearing in a database query, in the same order as `columnTypes`. - */ - columnNames: Array<string> - - /** - * List of rows retrieved from a database query. - * Each row is a list of values, whose length matches `columnNames` and `columnTypes`. - */ - rows: Array<Array<unknown>> - - /** - * The last ID of an `INSERT` statement, if any. - * This is required for `AUTO_INCREMENT` columns in MySQL and SQLite-flavoured databases.
- */ - lastInsertId?: string -} - -export type Query = { - sql: string - args: Array<unknown> -} - -export type Error = - | { - kind: 'GenericJs' - id: number - } - | { - kind: 'Postgres' - code: string - severity: string - message: string - detail: string | undefined - column: string | undefined - hint: string | undefined - } - | { - kind: 'Mysql' - code: number - message: string - state: string - } - | { - kind: 'Sqlite' - /** - * Sqlite extended error code: https://www.sqlite.org/rescode.html - */ - extendedCode: number - message: string - } - -export interface Queryable { - readonly flavour: 'mysql' | 'postgres' | 'sqlite' - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the type-aware result set of the query. - * - * This is the preferred way of executing `SELECT` queries. - */ - queryRaw(params: Query): Promise<Result<ResultSet>> - - /** - * Execute a query given as SQL, interpolating the given parameters, - * and returning the number of affected rows. - * - * This is the preferred way of executing `INSERT`, `UPDATE`, `DELETE` queries, - * as well as transactional queries. - */ - executeRaw(params: Query): Promise<Result<number>> -} - -export interface DriverAdapter extends Queryable { - /** - * Starts a new transaction. - */ - startTransaction(): Promise<Result<Transaction>> - - /** - * Closes the connection to the database, if any. - */ - close: () => Promise<Result<void>> -} - -export type TransactionOptions = { - usePhantomQuery: boolean -} - -export interface Transaction extends Queryable { - /** - * Transaction options. - */ - readonly options: TransactionOptions - /** - * Commit the transaction. - */ - commit(): Promise<Result<void>> - /** - * Rolls back the transaction. - */ - rollback(): Promise<Result<void>> - /** - * Discards and closes the transaction which may or may not have been committed or rolled back. - * This operation must be synchronous. If the implementation requires creating new - * asynchronous tasks on the event loop, the driver is responsible for handling the errors - * appropriately to ensure they don't crash the application.
- */ - dispose(): Result<void> -} - -export interface ErrorCapturingDriverAdapter extends DriverAdapter { - readonly errorRegistry: ErrorRegistry -} - -export interface ErrorRegistry { - consumeError(id: number): ErrorRecord | undefined -} - -export type ErrorRecord = { error: unknown } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json deleted file mode 100644 index 2c2e266bdb3b..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.build.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "declaration", - } -} diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json b/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/driver-adapter-utils/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/package.json b/query-engine/driver-adapters/js/package.json deleted file mode 100644 index 2036794f8c02..000000000000 --- a/query-engine/driver-adapters/js/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "private": true, - "name": "js", - "version": "0.0.2", - "description": "", - "engines": { - "node": ">=16.13", - "pnpm": ">=8.6.6 <9" - }, - "license": "Apache-2.0", - "scripts": { - "build": "pnpm -r run build", - "lint": "pnpm -r run lint" - }, - "keywords": [], - "author": "", - "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" - } -} diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml deleted file mode 100644 index 9a82ffdbac63..000000000000 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ /dev/null @@ -1,1554 +0,0 @@ -lockfileVersion: '6.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - adapter-libsql: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - async-mutex: - specifier: 0.4.0 - version: 0.4.0 - devDependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - - adapter-neon: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - - adapter-pg: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - postgres-array: - specifier: ^3.0.2 - version: 3.0.2 - devDependencies: - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - - adapter-planetscale: - dependencies: - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - devDependencies: - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 -
'@planetscale/database': - specifier: 1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.26.2 - version: 5.26.2 - - driver-adapter-utils: - dependencies: - debug: - specifier: ^4.3.4 - version: 4.3.4 - devDependencies: - '@types/debug': - specifier: ^4.1.8 - version: 4.1.8 - - smoke-test-js: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: ^1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: workspace:* - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: workspace:* - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: workspace:* - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: workspace:* - version: link:../adapter-planetscale - '@prisma/client': - specifier: 5.4.2 - version: 5.4.2(prisma@5.4.2) - '@prisma/driver-adapter-utils': - specifier: workspace:* - version: link:../driver-adapter-utils - pg: - specifier: ^8.11.3 - version: 8.11.3 - superjson: - specifier: ^1.13.1 - version: 1.13.1 - undici: - specifier: ^5.26.2 - version: 5.26.2 - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - cross-env: - specifier: ^7.0.3 - version: 7.0.3 - prisma: - specifier: 5.4.2 - version: 5.4.2 - tsx: - specifier: ^3.12.7 - version: 3.12.7 - -packages: - - /@esbuild-kit/cjs-loader@2.4.2: - resolution: {integrity: sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg==} - dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 - dev: true - - /@esbuild-kit/core-utils@3.2.2: - resolution: {integrity: sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA==} - dependencies: - esbuild: 0.18.20 - source-map-support: 0.5.21 - dev: true - - /@esbuild-kit/esm-loader@2.5.5: - resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} - dependencies: - '@esbuild-kit/core-utils': 3.2.2 - get-tsconfig: 4.7.0 - dev: true - - /@esbuild/android-arm64@0.18.20: - resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/android-arm@0.18.20: - resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/android-x64@0.18.20: - resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - requiresBuild: true - dev: true - optional: true - - /@esbuild/darwin-arm64@0.18.20: - 
resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@esbuild/darwin-x64@0.18.20: - resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@esbuild/freebsd-arm64@0.18.20: - resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/freebsd-x64@0.18.20: - resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-arm64@0.18.20: - resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-arm@0.18.20: - resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-ia32@0.18.20: - resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-loong64@0.18.20: - resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-mips64el@0.18.20: - resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-ppc64@0.18.20: - resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-riscv64@0.18.20: - resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-s390x@0.18.20: - resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/linux-x64@0.18.20: - resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@esbuild/netbsd-x64@0.18.20: - resolution: {integrity: 
sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/openbsd-x64@0.18.20: - resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - requiresBuild: true - dev: true - optional: true - - /@esbuild/sunos-x64@0.18.20: - resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-arm64@0.18.20: - resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-ia32@0.18.20: - resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@esbuild/win32-x64@0.18.20: - resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@fastify/busboy@2.0.0: - resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} - engines: {node: '>=14'} - dev: false - - /@jridgewell/gen-mapping@0.3.3: - resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/set-array': 1.1.2 - '@jridgewell/sourcemap-codec': 1.4.15 - '@jridgewell/trace-mapping': 0.3.19 - dev: true - - /@jridgewell/resolve-uri@3.1.1: - resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} - engines: {node: '>=6.0.0'} - dev: true - - /@jridgewell/set-array@1.1.2: - resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} - engines: {node: '>=6.0.0'} - dev: true - - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - dev: true - - /@jridgewell/trace-mapping@0.3.19: - resolution: {integrity: sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==} - dependencies: - '@jridgewell/resolve-uri': 3.1.1 - '@jridgewell/sourcemap-codec': 1.4.15 - dev: true - - /@libsql/client@0.3.5: - resolution: {integrity: sha512-4fZxGh0qKW5dtp1yuQLRvRAtbt02V4jzjM9sHSmz5k25xZTLg7/GlNudKdqKZrjJXEV5PvDNsczupBtedZZovw==} - dependencies: - '@libsql/hrana-client': 0.5.5 - js-base64: 3.7.5 - libsql: 0.1.28 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate - - /@libsql/darwin-arm64@0.1.28: - resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} - cpu: [arm64] - os: [darwin] - requiresBuild: true - optional: true - - /@libsql/darwin-x64@0.1.28: - resolution: {integrity: 
sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} - cpu: [x64] - os: [darwin] - requiresBuild: true - optional: true - - /@libsql/hrana-client@0.5.5: - resolution: {integrity: sha512-i+hDBpiV719poqEiHupUUZYKJ9YSbCRFe5Q2PQ0v3mHIftePH6gayLjp2u6TXbqbO/Dv6y8yyvYlBXf/kFfRZA==} - dependencies: - '@libsql/isomorphic-fetch': 0.1.10 - '@libsql/isomorphic-ws': 0.1.5 - js-base64: 3.7.5 - node-fetch: 3.3.2 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate - - /@libsql/isomorphic-fetch@0.1.10: - resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} - dependencies: - '@types/node-fetch': 2.6.6 - node-fetch: 2.7.0 - transitivePeerDependencies: - - encoding - - /@libsql/isomorphic-ws@0.1.5: - resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} - dependencies: - '@types/ws': 8.5.5 - ws: 8.14.2 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - /@libsql/linux-arm64-gnu@0.1.28: - resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} - cpu: [arm64] - os: [linux] - requiresBuild: true - optional: true - - /@libsql/linux-x64-gnu@0.1.28: - resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} - cpu: [x64] - os: [linux] - requiresBuild: true - optional: true - - /@libsql/linux-x64-musl@0.1.28: - resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} - cpu: [x64] - os: [linux] - requiresBuild: true - optional: true - - /@libsql/win32-x64-msvc@0.1.28: - resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} - cpu: [x64] - os: [win32] - requiresBuild: true - optional: true - - /@neon-rs/load@0.0.4: - resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} - - /@neondatabase/serverless@0.6.0: - resolution: {integrity: sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} - dependencies: - '@types/pg': 8.6.6 - - /@nodelib/fs.scandir@2.1.5: - resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - - /@nodelib/fs.stat@2.0.5: - resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} - engines: {node: '>= 8'} - dev: true - - /@nodelib/fs.walk@1.2.8: - resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - - /@planetscale/database@1.11.0: - resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} - engines: {node: '>=16'} - - /@prisma/client@5.4.2(prisma@5.4.2): - resolution: {integrity: sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - '@prisma/engines-version': 
5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574 - prisma: 5.4.2 - dev: false - - /@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574: - resolution: {integrity: sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA==} - dev: false - - /@prisma/engines@5.4.2: - resolution: {integrity: sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g==} - requiresBuild: true - - /@types/debug@4.1.8: - resolution: {integrity: sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==} - dependencies: - '@types/ms': 0.7.31 - dev: true - - /@types/ms@0.7.31: - resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} - dev: true - - /@types/node-fetch@2.6.6: - resolution: {integrity: sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==} - dependencies: - '@types/node': 20.6.5 - form-data: 4.0.0 - - /@types/node@20.5.1: - resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} - dev: true - - /@types/node@20.5.9: - resolution: {integrity: sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ==} - - /@types/node@20.6.5: - resolution: {integrity: sha512-2qGq5LAOTh9izcc0+F+dToFigBWiK1phKPt7rNhOqJSr35y8rlIBjDwGtFSgAI6MGIhjwOVNSQZVdJsZJ2uR1w==} - - /@types/pg@8.10.2: - resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} - dependencies: - '@types/node': 20.5.9 - pg-protocol: 1.6.0 - pg-types: 4.0.1 - - /@types/pg@8.6.6: - resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} - dependencies: - '@types/node': 20.5.9 - pg-protocol: 1.6.0 - pg-types: 2.2.0 - - /@types/ws@8.5.5: - resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} - dependencies: - '@types/node': 20.6.5 - - /any-promise@1.3.0: - resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} - dev: true - - /anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - dev: true - - /array-union@2.1.0: - resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} - engines: {node: '>=8'} - dev: true - - /async-mutex@0.4.0: - resolution: {integrity: sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA==} - dependencies: - tslib: 2.6.2 - dev: false - - /asynckit@0.4.0: - resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - - /balanced-match@1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - dev: true - - /binary-extensions@2.2.0: - resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} - engines: {node: '>=8'} - dev: true - - /brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} - 
dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - dev: true - - /braces@3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} - dependencies: - fill-range: 7.0.1 - dev: true - - /buffer-from@1.1.2: - resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - dev: true - - /buffer-writer@2.0.0: - resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} - engines: {node: '>=4'} - - /bundle-require@4.0.1(esbuild@0.18.20): - resolution: {integrity: sha512-9NQkRHlNdNpDBGmLpngF3EFDcwodhMUuLz9PaWYciVcQF9SE4LFjM2DB/xV1Li5JiuDMv7ZUWuC3rGbqR0MAXQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - peerDependencies: - esbuild: '>=0.17' - dependencies: - esbuild: 0.18.20 - load-tsconfig: 0.2.5 - dev: true - - /cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - dev: true - - /chokidar@3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} - engines: {node: '>= 8.10.0'} - dependencies: - anymatch: 3.1.3 - braces: 3.0.2 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - - /combined-stream@1.0.8: - resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} - engines: {node: '>= 0.8'} - dependencies: - delayed-stream: 1.0.0 - - /commander@4.1.1: - resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} - engines: {node: '>= 6'} - dev: true - - /concat-map@0.0.1: - resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - dev: true - - /copy-anything@3.0.5: - resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} - engines: {node: '>=12.13'} - dependencies: - is-what: 4.1.15 - dev: false - - /cross-env@7.0.3: - resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} - engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} - hasBin: true - dependencies: - cross-spawn: 7.0.3 - dev: true - - /cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - dev: true - - /data-uri-to-buffer@4.0.1: - resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} - engines: {node: '>= 12'} - - /debug@4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - - /delayed-stream@1.0.0: - resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} - engines: {node: '>=0.4.0'} - - /detect-libc@2.0.2: - resolution: {integrity: 
sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} - engines: {node: '>=8'} - - /dir-glob@3.0.1: - resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} - engines: {node: '>=8'} - dependencies: - path-type: 4.0.0 - dev: true - - /esbuild@0.18.20: - resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 0.18.20 - '@esbuild/android-arm64': 0.18.20 - '@esbuild/android-x64': 0.18.20 - '@esbuild/darwin-arm64': 0.18.20 - '@esbuild/darwin-x64': 0.18.20 - '@esbuild/freebsd-arm64': 0.18.20 - '@esbuild/freebsd-x64': 0.18.20 - '@esbuild/linux-arm': 0.18.20 - '@esbuild/linux-arm64': 0.18.20 - '@esbuild/linux-ia32': 0.18.20 - '@esbuild/linux-loong64': 0.18.20 - '@esbuild/linux-mips64el': 0.18.20 - '@esbuild/linux-ppc64': 0.18.20 - '@esbuild/linux-riscv64': 0.18.20 - '@esbuild/linux-s390x': 0.18.20 - '@esbuild/linux-x64': 0.18.20 - '@esbuild/netbsd-x64': 0.18.20 - '@esbuild/openbsd-x64': 0.18.20 - '@esbuild/sunos-x64': 0.18.20 - '@esbuild/win32-arm64': 0.18.20 - '@esbuild/win32-ia32': 0.18.20 - '@esbuild/win32-x64': 0.18.20 - dev: true - - /execa@5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - dev: true - - /fast-glob@3.3.1: - resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} - engines: {node: '>=8.6.0'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: true - - /fastq@1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} - dependencies: - reusify: 1.0.4 - dev: true - - /fetch-blob@3.2.0: - resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} - engines: {node: ^12.20 || >= 14.13} - dependencies: - node-domexception: 1.0.0 - web-streams-polyfill: 3.2.1 - - /fill-range@7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} - engines: {node: '>=8'} - dependencies: - to-regex-range: 5.0.1 - dev: true - - /form-data@4.0.0: - resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - - /formdata-polyfill@4.0.10: - resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} - engines: {node: '>=12.20.0'} - dependencies: - fetch-blob: 3.2.0 - - /fs.realpath@1.0.0: - resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} - dev: true - - /fsevents@2.3.3: - resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - 
requiresBuild: true - dev: true - optional: true - - /get-stream@6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - dev: true - - /get-tsconfig@4.7.0: - resolution: {integrity: sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw==} - dependencies: - resolve-pkg-maps: 1.0.0 - dev: true - - /glob-parent@5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} - dependencies: - is-glob: 4.0.3 - dev: true - - /glob@7.1.6: - resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==} - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - dev: true - - /globby@11.1.0: - resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} - engines: {node: '>=10'} - dependencies: - array-union: 2.1.0 - dir-glob: 3.0.1 - fast-glob: 3.3.1 - ignore: 5.2.4 - merge2: 1.4.1 - slash: 3.0.0 - dev: true - - /human-signals@2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - dev: true - - /ignore@5.2.4: - resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} - engines: {node: '>= 4'} - dev: true - - /inflight@1.0.6: - resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - dev: true - - /inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true - - /is-binary-path@2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} - dependencies: - binary-extensions: 2.2.0 - dev: true - - /is-extglob@2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - dev: true - - /is-glob@4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: '>=0.10.0'} - dependencies: - is-extglob: 2.1.1 - dev: true - - /is-number@7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - dev: true - - /is-stream@2.0.1: - resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} - engines: {node: '>=8'} - dev: true - - /is-what@4.1.15: - resolution: {integrity: sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==} - engines: {node: '>=12.13'} - dev: false - - /isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true - - /joycon@3.1.1: - resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} - engines: {node: '>=10'} - dev: true - - /js-base64@3.7.5: - resolution: {integrity: 
sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} - - /libsql@0.1.28: - resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} - cpu: [x64, arm64] - os: [darwin, linux, win32] - dependencies: - '@neon-rs/load': 0.0.4 - detect-libc: 2.0.2 - optionalDependencies: - '@libsql/darwin-arm64': 0.1.28 - '@libsql/darwin-x64': 0.1.28 - '@libsql/linux-arm64-gnu': 0.1.28 - '@libsql/linux-x64-gnu': 0.1.28 - '@libsql/linux-x64-musl': 0.1.28 - '@libsql/win32-x64-msvc': 0.1.28 - - /lilconfig@2.1.0: - resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} - engines: {node: '>=10'} - dev: true - - /lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - dev: true - - /load-tsconfig@0.2.5: - resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true - - /lodash.sortby@4.7.0: - resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} - dev: true - - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true - - /merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: true - - /micromatch@4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true - - /mime-db@1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - - /mime-types@2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true - - /minimatch@3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - dependencies: - brace-expansion: 1.1.11 - dev: true - - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - - /mz@2.7.0: - resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} - dependencies: - any-promise: 1.3.0 - object-assign: 4.1.1 - thenify-all: 1.6.0 - dev: true - - /node-domexception@1.0.0: - resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} - engines: {node: '>=10.5.0'} - - /node-fetch@2.7.0: - resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - - /node-fetch@3.3.2: - resolution: {integrity: 
sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - data-uri-to-buffer: 4.0.1 - fetch-blob: 3.2.0 - formdata-polyfill: 4.0.10 - - /normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - dev: true - - /npm-run-path@4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - dependencies: - path-key: 3.1.1 - dev: true - - /object-assign@4.1.1: - resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} - engines: {node: '>=0.10.0'} - dev: true - - /obuf@1.1.2: - resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} - - /once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - dependencies: - wrappy: 1.0.2 - dev: true - - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true - - /packet-reader@1.0.0: - resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} - - /path-is-absolute@1.0.1: - resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} - engines: {node: '>=0.10.0'} - dev: true - - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true - - /path-type@4.0.0: - resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} - engines: {node: '>=8'} - dev: true - - /pg-cloudflare@1.1.1: - resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} - requiresBuild: true - optional: true - - /pg-connection-string@2.6.2: - resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} - - /pg-int8@1.0.1: - resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} - engines: {node: '>=4.0.0'} - - /pg-numeric@1.0.2: - resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} - engines: {node: '>=4'} - - /pg-pool@3.6.1(pg@8.11.3): - resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} - peerDependencies: - pg: '>=8.0' - dependencies: - pg: 8.11.3 - - /pg-protocol@1.6.0: - resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} - - /pg-types@2.2.0: - resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} - engines: {node: '>=4'} - dependencies: - pg-int8: 1.0.1 - postgres-array: 2.0.0 - postgres-bytea: 1.0.0 - postgres-date: 1.0.7 - postgres-interval: 1.2.0 - - /pg-types@4.0.1: - resolution: {integrity: 
sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} - engines: {node: '>=10'} - dependencies: - pg-int8: 1.0.1 - pg-numeric: 1.0.2 - postgres-array: 3.0.2 - postgres-bytea: 3.0.0 - postgres-date: 2.0.1 - postgres-interval: 3.0.0 - postgres-range: 1.1.3 - - /pg@8.11.3: - resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} - engines: {node: '>= 8.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - dependencies: - buffer-writer: 2.0.0 - packet-reader: 1.0.0 - pg-connection-string: 2.6.2 - pg-pool: 3.6.1(pg@8.11.3) - pg-protocol: 1.6.0 - pg-types: 2.2.0 - pgpass: 1.0.5 - optionalDependencies: - pg-cloudflare: 1.1.1 - - /pgpass@1.0.5: - resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} - dependencies: - split2: 4.2.0 - - /picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - dev: true - - /pirates@4.0.6: - resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==} - engines: {node: '>= 6'} - dev: true - - /postcss-load-config@4.0.1: - resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} - engines: {node: '>= 14'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true - dependencies: - lilconfig: 2.1.0 - yaml: 2.3.2 - dev: true - - /postgres-array@2.0.0: - resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} - engines: {node: '>=4'} - - /postgres-array@3.0.2: - resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} - engines: {node: '>=12'} - - /postgres-bytea@1.0.0: - resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} - engines: {node: '>=0.10.0'} - - /postgres-bytea@3.0.0: - resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} - engines: {node: '>= 6'} - dependencies: - obuf: 1.1.2 - - /postgres-date@1.0.7: - resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} - engines: {node: '>=0.10.0'} - - /postgres-date@2.0.1: - resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} - engines: {node: '>=12'} - - /postgres-interval@1.2.0: - resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} - engines: {node: '>=0.10.0'} - dependencies: - xtend: 4.0.2 - - /postgres-interval@3.0.0: - resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} - engines: {node: '>=12'} - - /postgres-range@1.1.3: - resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} - - /prisma@5.4.2: - resolution: {integrity: sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - 
dependencies: - '@prisma/engines': 5.4.2 - - /punycode@2.3.0: - resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} - engines: {node: '>=6'} - dev: true - - /queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true - - /readdirp@3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} - dependencies: - picomatch: 2.3.1 - dev: true - - /resolve-from@5.0.0: - resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} - engines: {node: '>=8'} - dev: true - - /resolve-pkg-maps@1.0.0: - resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} - dev: true - - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true - - /rollup@3.28.1: - resolution: {integrity: sha512-R9OMQmIHJm9znrU3m3cpE8uhN0fGdXiawME7aZIpQqvpS/85+Vt1Hq1/yVIcYfOmaQiHjvXkQAoJukvLpau6Yw==} - engines: {node: '>=14.18.0', npm: '>=8.0.0'} - hasBin: true - optionalDependencies: - fsevents: 2.3.3 - dev: true - - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 - dev: true - - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true - - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true - - /slash@3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} - dev: true - - /source-map-support@0.5.21: - resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map@0.6.1: - resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} - engines: {node: '>=0.10.0'} - dev: true - - /source-map@0.8.0-beta.0: - resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} - engines: {node: '>= 8'} - dependencies: - whatwg-url: 7.1.0 - dev: true - - /split2@4.2.0: - resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} - engines: {node: '>= 10.x'} - - /strip-final-newline@2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - dev: true - - /sucrase@3.34.0: - resolution: {integrity: sha512-70/LQEZ07TEcxiU2dz51FKaE6hCTWC6vr7FOk3Gr0U60C3shtAN+H+BFr9XlYe5xqf3RA8nrc+VIwzCfnxuXJw==} - engines: {node: '>=8'} - hasBin: true - 
dependencies: - '@jridgewell/gen-mapping': 0.3.3 - commander: 4.1.1 - glob: 7.1.6 - lines-and-columns: 1.2.4 - mz: 2.7.0 - pirates: 4.0.6 - ts-interface-checker: 0.1.13 - dev: true - - /superjson@1.13.1: - resolution: {integrity: sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==} - engines: {node: '>=10'} - dependencies: - copy-anything: 3.0.5 - dev: false - - /thenify-all@1.6.0: - resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} - engines: {node: '>=0.8'} - dependencies: - thenify: 3.3.1 - dev: true - - /thenify@3.3.1: - resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} - dependencies: - any-promise: 1.3.0 - dev: true - - /to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} - dependencies: - is-number: 7.0.0 - dev: true - - /tr46@0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - - /tr46@1.0.1: - resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} - dependencies: - punycode: 2.3.0 - dev: true - - /tree-kill@1.2.2: - resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} - hasBin: true - dev: true - - /ts-interface-checker@0.1.13: - resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} - dev: true - - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - - /tsup@7.2.0(typescript@5.1.6): - resolution: {integrity: sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} - engines: {node: '>=16.14'} - hasBin: true - peerDependencies: - '@swc/core': ^1 - postcss: ^8.4.12 - typescript: '>=4.1.0' - peerDependenciesMeta: - '@swc/core': - optional: true - postcss: - optional: true - typescript: - optional: true - dependencies: - bundle-require: 4.0.1(esbuild@0.18.20) - cac: 6.7.14 - chokidar: 3.5.3 - debug: 4.3.4 - esbuild: 0.18.20 - execa: 5.1.1 - globby: 11.1.0 - joycon: 3.1.1 - postcss-load-config: 4.0.1 - resolve-from: 5.0.0 - rollup: 3.28.1 - source-map: 0.8.0-beta.0 - sucrase: 3.34.0 - tree-kill: 1.2.2 - typescript: 5.1.6 - transitivePeerDependencies: - - supports-color - - ts-node - dev: true - - /tsx@3.12.7: - resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} - hasBin: true - dependencies: - '@esbuild-kit/cjs-loader': 2.4.2 - '@esbuild-kit/core-utils': 3.2.2 - '@esbuild-kit/esm-loader': 2.5.5 - optionalDependencies: - fsevents: 2.3.3 - dev: true - - /typescript@5.1.6: - resolution: {integrity: sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==} - engines: {node: '>=14.17'} - hasBin: true - dev: true - - /undici@5.26.2: - resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} - engines: {node: '>=14.0'} - dependencies: - '@fastify/busboy': 2.0.0 - dev: false - - /web-streams-polyfill@3.2.1: - resolution: {integrity: 
sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} - engines: {node: '>= 8'} - - /webidl-conversions@3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - - /webidl-conversions@4.0.2: - resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} - dev: true - - /whatwg-url@5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - - /whatwg-url@7.1.0: - resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} - dependencies: - lodash.sortby: 4.7.0 - tr46: 1.0.1 - webidl-conversions: 4.0.2 - dev: true - - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - dev: true - - /ws@8.14.2: - resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - - /xtend@4.0.2: - resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} - engines: {node: '>=0.4'} - - /yaml@2.3.2: - resolution: {integrity: sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==} - engines: {node: '>= 14'} - dev: true diff --git a/query-engine/driver-adapters/js/pnpm-workspace.yaml b/query-engine/driver-adapters/js/pnpm-workspace.yaml deleted file mode 100644 index f9e70da7ee5a..000000000000 --- a/query-engine/driver-adapters/js/pnpm-workspace.yaml +++ /dev/null @@ -1,8 +0,0 @@ -packages: - - './adapter-libsql' - - './adapter-neon' - - './adapter-pg' - - './adapter-planetscale' - - './connector-test-kit-executor' - - './driver-adapter-utils' - - './smoke-test-js' diff --git a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example b/query-engine/driver-adapters/js/smoke-test-js/.envrc.example deleted file mode 100644 index 15a286787cbd..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.envrc.example +++ /dev/null @@ -1,26 +0,0 @@ -# Uncomment "source_up" if you need to load the .envrc at the root of the -# `prisma-engines` repository before loading this one (for example, if you -# are using Nix). -# -# source_up - -export JS_PLANETSCALE_DATABASE_URL="mysql://USER:PASSWORD@aws.connect.psdb.cloud/DATABASE?sslaccept=strict" -export JS_NEON_DATABASE_URL="postgres://USER:PASSWORD@DATABASE-pooler.eu-central-1.aws.neon.tech/neondb?pgbouncer=true&connect_timeout=10" - -# Note: if you use hosted Postgres instances (e.g., from PDP provision), you need `?sslmode=disable` -export JS_PG_DATABASE_URL="postgres://postgres:prisma@localhost:5438" - -# Set this to a `file:` URL when using a local sqlite database (either -# standalone or as an embedded replica). Otherwise, when using a remote Turso -# (or sqld) database in HTTP mode directly without an embedded replica, set its -# URL here. 
-export JS_LIBSQL_DATABASE_URL="file:${PWD}/libsql.db" - -# # Set this to the URL of remote Turso database when using an embedded replica. -# export JS_LIBSQL_SYNC_URL="" - -# # Provide an auth token when using a remote Turso database. -# export JS_LIBSQL_AUTH_TOKEN="" - -# Can be one of "number" (the default when nothing is specified), "bigint" or "string". "bigint" works best with Prisma. -export JS_LIBSQL_INT_MODE="bigint" diff --git a/query-engine/driver-adapters/js/smoke-test-js/.gitignore b/query-engine/driver-adapters/js/smoke-test-js/.gitignore deleted file mode 100644 index be550f99317f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -libsql.db -libsql.db-journal -libsql.db-shm -libsql.db-wal diff --git a/query-engine/driver-adapters/js/smoke-test-js/README.md b/query-engine/driver-adapters/js/smoke-test-js/README.md deleted file mode 100644 index f1b81df5d268..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# @prisma/driver-adapters-smoke-tests-js - -This is a playground for testing the `libquery` client with the experimental Node.js drivers. -It contains a subset of `@prisma/client`, plus some handy executable smoke tests: -- [`./src/libquery`](./src/libquery): it contains smoke tests using a local `libquery`, the Query Engine library. -- [`./src/client`](./src/client): it contains smoke tests using `@prisma/client`. - -## How to setup - -We assume a recent Node.js is installed (e.g., `v20.5.x`). If not, run `nvm use` in the current directory. -It's very important to double-check if you have multiple versions installed, as both PlanetScale and Neon requires either Node.js `v18`+ or a custom `fetch` function. - -In the parent directory (`cd ..`): -- Build the driver adapters via `pnpm i && pnpm build` - -In the current directoy: -- Create a `.envrc` starting from `.envrc.example`, and fill in the missing values following the given template -- Install Node.js dependencies via - ```bash - pnpm i - ``` - -(or run `sh ./setup.sh`) - -Anywhere in the repository: -- Run `cargo build -p query-engine-node-api` to compile the `libquery` Query Engine - -### PlanetScale - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database on [PlanetScale](https://planetscale.com/) -- Go to `Settings` > `Passwords`, and create a new password for the `main` database branch. Select the `Prisma` template and copy the generated URL (comprising username, password, etc). -- Paste it in the `JS_PLANETSCALE_DATABASE_URL` environment variable in `.envrc`. - -In the current directory: -- Run `pnpm prisma:planetscale` to push the Prisma schema and insert the test data. -- Run `pnpm planetscale` to run smoke tests using `libquery` against the PlanetScale database. - For more fine-grained control: - - Run `pnpm planetscale:libquery` to test using `libquery` - - Run `pnpm planetscale:client` to test using `@prisma/client` - -### Neon - -If you don't have a connection string yet: - -- [Follow the Notion document](https://www.notion.so/How-to-get-a-PlanetScale-and-Neon-database-for-testing-93d978061f9c4ffc80ebfed36896af16) or create a new database with Neon CLI `npx neonctl projects create` or in [Neon Console](https://neon.tech). -- Paste the connection string to `JS_NEON_DATABASE_URL`. 
- -In the current directory: -- Run `pnpm prisma:neon` to push the Prisma schema and insert the test data. -- Run `pnpm neon:ws` to run smoke tests using `libquery` against the Neon database, using a WebSocket connection. - For more fine-grained control: - - Run `pnpm neon:ws:libquery` to test using `libquery` - - Run `pnpm neon:ws:client` to test using `@prisma/client` -- Run `pnpm neon:http` to run smoke tests using `libquery` against the Neon database, using an HTTP connection. In this case, transactions won't work, and tests are expected to fail. - For more fine-grained control: - - Run `pnpm neon:http:libquery` to test using `libquery` - - Run `pnpm neon:http:client` to test using `@prisma/client` - -### Pg - -Start database via `docker compose up postgres15` in `/docker`. - -In the current directory: -- Run `pnpm prisma:pg` to push the Prisma schema and insert the test data. -- Run `pnpm pg` to run smoke tests using `libquery` against the PostgreSQL database, using `pg` - For more fine-grained control: - - Run `pnpm pg:libquery` to test using `libquery` - - Run `pnpm pg:client` to test using `@prisma/client` - -### Libsql - -In the current directory: -- Run `pnpm prisma:libsql` to push the Prisma schema and insert the test data. -- Run `pnpm libsql` to run smoke tests using `libquery` against the SQLite database, using `libSQL` - For more fine-grained control: - - Run `pnpm libsql:libquery` to test using `libquery` - - Run `pnpm libsql:client` to test using `@prisma/client` \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/package.json b/query-engine/driver-adapters/js/smoke-test-js/package.json deleted file mode 100644 index 31362c1cc873..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "@prisma/driver-adapters-smoke-tests-js", - "private": true, - "type": "module", - "version": "5.4.0", - "description": "", - "scripts": { - "prisma:db:push:postgres": "prisma db push --schema ./prisma/postgres/schema.prisma --force-reset", - "prisma:db:execute:postgres": "prisma db execute --schema ./prisma/postgres/schema.prisma --file ./prisma/postgres/commands/type_test/insert.sql", - "prisma:studio:postgres": "prisma studio --schema ./prisma/postgres/schema.prisma", - "prisma:db:push:mysql": "prisma db push --schema ./prisma/mysql/schema.prisma --force-reset", - "prisma:db:execute:mysql": "prisma db execute --schema ./prisma/mysql/schema.prisma --file ./prisma/mysql/commands/type_test/insert.sql", - "prisma:db:push:sqlite": "prisma db push --schema ./prisma/sqlite/schema.prisma --force-reset", - "prisma:db:execute:sqlite": "prisma db execute --schema ./prisma/sqlite/schema.prisma --file ./prisma/sqlite/commands/type_test/insert.sql", - "prisma:studio:mysql": "prisma studio --schema ./prisma/mysql/schema.prisma", - "prisma:neon:ws": "pnpm prisma:neon", - "prisma:neon:http": "pnpm prisma:neon", - "prisma:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:neon": "cross-env-shell DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "neon:ws:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.ws.test.ts", - "neon:http:libquery": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/neon.http.test.ts", - "neon:ws:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" 
node --test --test-reporter spec --loader=tsx ./src/client/neon.ws.test.ts", - "neon:http:client": "DATABASE_URL=\"${JS_NEON_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/neon.http.test.ts", - "neon:ws": "pnpm neon:ws:libquery && pnpm neon:ws:client", - "neon:http": "pnpm neon:http:libquery && pnpm neon:http:client", - "prisma:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:db:push:postgres && pnpm prisma:db:execute:postgres\"", - "studio:pg": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" \"pnpm prisma:studio:postgres\"", - "pg:libquery": "cross-env-shell DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/pg.test.ts", - "pg:client": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/pg.test.ts", - "pg": "pnpm pg:libquery && pnpm pg:client", - "errors": "DATABASE_URL=\"${JS_PG_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/errors.test.ts", - "prisma:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:db:push:mysql && pnpm prisma:db:execute:mysql\"", - "studio:planetscale": "cross-env-shell DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" \"pnpm prisma:studio:mysql\"", - "planetscale:libquery": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/planetscale.test.ts", - "planetscale:client": "DATABASE_URL=\"${JS_PLANETSCALE_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/planetscale.test.ts", - "planetscale": "pnpm planetscale:libquery && pnpm planetscale:client", - "prisma:libsql": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" \"pnpm prisma:db:push:sqlite && pnpm prisma:db:execute:sqlite\"", - "libsql:libquery": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/libquery/libsql.test.ts", - "libsql:client": "cross-env-shell DATABASE_URL=\"${JS_LIBSQL_DATABASE_URL}\" node --test --test-reporter spec --loader=tsx ./src/client/libsql.test.ts", - "libsql": "pnpm libsql:libquery && pnpm libsql:client" - }, - "keywords": [], - "author": "Alberto Schiabel ", - "license": "Apache-2.0", - "sideEffects": true, - "dependencies": { - "@libsql/client": "0.3.5", - "@neondatabase/serverless": "^0.6.0", - "@planetscale/database": "^1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/client": "5.4.2", - "@prisma/driver-adapter-utils": "workspace:*", - "pg": "^8.11.3", - "superjson": "^1.13.1", - "undici": "^5.26.2" - }, - "devDependencies": { - "@types/node": "^20.5.1", - "@types/pg": "^8.10.2", - "cross-env": "^7.0.3", - "prisma": "5.4.2", - "tsx": "^3.12.7" - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql deleted file mode 100644 index 6641eff216b2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/commands/type_test/insert.sql +++ /dev/null @@ -1,51 +0,0 @@ -INSERT INTO type_test ( - tinyint_column, - smallint_column, - mediumint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - bit_column, - char_column, - varchar_column, - text_column, - 
date_column, - time_column, - year_column, - datetime_column, - timestamp_column, - json_column, - enum_column, - binary_column, - varbinary_column, - blob_column, - set_column -) VALUES ( - 127, -- tinyint - 32767, -- smallint - 8388607, -- mediumint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 1, -- bit - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - 2023, -- year - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3', -- enum - 0x4D7953514C, -- binary - 0x48656C6C6F20, -- varbinary - _binary 'binary', -- blob - 'option1,option3' -- set -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma deleted file mode 100644 index 59efb33a5594..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/mysql/schema.prisma +++ /dev/null @@ -1,125 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "mysql" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - tinyint_column Int @db.TinyInt - tinyint_column_null Int? @db.TinyInt - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - mediumint_column Int @db.MediumInt - mediumint_column_null Int? @db.MediumInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Float - float_column_null Float? @db.Float - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - bit_column Boolean @db.Bit(1) - bit_column_null Boolean? @db.Bit(1) - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String @db.Text - text_column_null String? @db.Text - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - year_column Int @db.Year - year_column_null Int? @db.Year - datetime_column DateTime @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? - binary_column Bytes @db.Binary(64) - binary_column_null Bytes? @db.Binary(64) - varbinary_column Bytes @db.VarBinary(128) - varbinary_column_null Bytes? @db.VarBinary(128) - blob_column Bytes @db.Blob - blob_null Bytes? @db.Blob - set_column String - set_column_null String? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.DateTime(3) - datetime_column_null DateTime? @db.DateTime(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? 
@unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql deleted file mode 100644 index 170bafb9d810..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/commands/type_test/insert.sql +++ /dev/null @@ -1,35 +0,0 @@ -INSERT INTO type_test ( - smallint_column, - int_column, - bigint_column, - float_column, - double_column, - decimal_column, - boolean_column, - char_column, - varchar_column, - text_column, - date_column, - time_column, - datetime_column, - timestamp_column, - json_column, - enum_column -) VALUES ( - 32767, -- smallint - 2147483647, -- int - 9223372036854775807, -- bigint - 3.402823466, -- float - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'c', -- char - 'Sample varchar', -- varchar - 'This is a long text...', -- text - '2023-07-24', -- date - '23:59:59', -- time - '2023-07-24 23:59:59.415', -- datetime - '2023-07-24 23:59:59', -- timestamp - '{"key": "value"}', -- json - 'value3' -- enum -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma deleted file mode 100644 index 7cd31f406b9d..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/postgres/schema.prisma +++ /dev/null @@ -1,117 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "postgres" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - smallint_column Int @db.SmallInt - smallint_column_null Int? @db.SmallInt - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - float_column Float @db.Real - float_column_null Float? @db.Real - double_column Float - double_column_null Float? - decimal_column Decimal @db.Decimal(10, 2) - decimal_column_null Decimal? @db.Decimal(10, 2) - boolean_column Boolean - boolean_column_null Boolean? - char_column String @db.Char(10) - char_column_null String? @db.Char(10) - varchar_column String @db.VarChar(255) - varchar_column_null String? @db.VarChar(255) - text_column String - text_column_null String? - date_column DateTime @db.Date - date_column_null DateTime? @db.Date - time_column DateTime @db.Time(0) - time_column_null DateTime? @db.Time(0) - datetime_column DateTime @db.Timestamp(3) - datetime_column_null DateTime? @db.Timestamp(3) - timestamp_column DateTime @db.Timestamp(0) - timestamp_column_null DateTime? @db.Timestamp(0) - json_column Json - json_column_null Json? - enum_column type_test_enum_column - enum_column_null type_test_enum_column_null? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) @db.Timestamp(3) - datetime_column_null DateTime? 
@db.Timestamp(3) -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -enum type_test_enum_column { - value1 - value2 - value3 -} - -enum type_test_enum_column_null { - value1 - value2 - value3 -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties Json - properties_null Json? - users User[] -} - -model User { - id String @id @default(uuid()) - email String - favoriteProduct Product? @relation(fields: [productId], references: [id]) - productId String? -} - -model Unique { - email String @id -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql deleted file mode 100644 index 014592d2fa2c..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/commands/type_test/insert.sql +++ /dev/null @@ -1,17 +0,0 @@ -INSERT INTO type_test ( - int_column, - bigint_column, - double_column, - decimal_column, - boolean_column, - text_column, - datetime_column -) VALUES ( - 2147483647, -- int - 9223372036854775807, -- bigint - 1.7976931348623157, -- double - 99999999.99, -- decimal - TRUE, -- boolean - 'This is a long text...', -- text - '2023-07-24 23:59:59.415' -- datetime -); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql deleted file mode 100644 index 31c63d423e22..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/20230915202554_init/migration.sql +++ /dev/null @@ -1,85 +0,0 @@ --- CreateTable -CREATE TABLE "type_test" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "int_column" INTEGER NOT NULL, - "int_column_null" INTEGER, - "bigint_column" BIGINT NOT NULL, - "bigint_column_null" BIGINT, - "double_column" REAL NOT NULL, - "double_column_null" REAL, - "decimal_column" DECIMAL NOT NULL, - "decimal_column_null" DECIMAL, - "boolean_column" BOOLEAN NOT NULL, - "boolean_column_null" BOOLEAN, - "text_column" TEXT NOT NULL, - "text_column_null" TEXT, - "datetime_column" DATETIME NOT NULL, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_2" ( - "id" TEXT NOT NULL PRIMARY KEY, - "datetime_column" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - "datetime_column_null" DATETIME -); - --- CreateTable -CREATE TABLE "type_test_3" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "bytes" BLOB NOT NULL -); - --- CreateTable -CREATE TABLE "Child" ( - "c" TEXT NOT NULL, - "c_1" TEXT NOT NULL, - "c_2" TEXT NOT NULL, - "parentId" TEXT, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "Parent" ( - "p" TEXT NOT NULL, - "p_1" TEXT NOT NULL, - "p_2" TEXT NOT NULL, - "non_unique" TEXT, - "id" TEXT NOT NULL PRIMARY KEY -); - --- CreateTable -CREATE TABLE "authors" ( - "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - "firstName" TEXT NOT NULL, - "lastName" TEXT NOT NULL, - "age" INTEGER NOT NULL -); - --- CreateTable 
-CREATE TABLE "Product" ( - "id" TEXT NOT NULL PRIMARY KEY, - "properties" TEXT NOT NULL, - "properties_null" TEXT -); - --- CreateTable -CREATE TABLE "Unique" ( - "email" TEXT NOT NULL PRIMARY KEY, -); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_key" ON "Child"("c"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_parentId_key" ON "Child"("parentId"); - --- CreateIndex -CREATE UNIQUE INDEX "Child_c_1_c_2_key" ON "Child"("c_1", "c_2"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_key" ON "Parent"("p"); - --- CreateIndex -CREATE UNIQUE INDEX "Parent_p_1_p_2_key" ON "Parent"("p_1", "p_2"); diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml deleted file mode 100644 index e5e5c4705ab0..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/migrations/migration_lock.toml +++ /dev/null @@ -1,3 +0,0 @@ -# Please do not edit this file manually -# It should be added in your version-control system (i.e. Git) -provider = "sqlite" \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma b/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma deleted file mode 100644 index bde23dee66ac..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,79 +0,0 @@ -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters"] -} - -datasource db { - provider = "sqlite" - url = env("DATABASE_URL") -} - -model type_test { - id Int @id @default(autoincrement()) - int_column Int - int_column_null Int? - bigint_column BigInt - bigint_column_null BigInt? - double_column Float - double_column_null Float? - decimal_column Decimal - decimal_column_null Decimal? - boolean_column Boolean - boolean_column_null Boolean? - text_column String - text_column_null String? - datetime_column DateTime - datetime_column_null DateTime? -} - -// This will eventually supersede type_test -model type_test_2 { - id String @id @default(cuid()) - datetime_column DateTime @default(now()) - datetime_column_null DateTime? -} - -model type_test_3 { - id Int @id @default(autoincrement()) - bytes Bytes -} - -model Child { - c String @unique - c_1 String - c_2 String - parentId String? @unique - non_unique String? - id String @id - - @@unique([c_1, c_2]) -} - -model Parent { - p String @unique - p_1 String - p_2 String - non_unique String? - id String @id - - @@unique([p_1, p_2]) -} - -model Author { - id Int @id @default(autoincrement()) - firstName String - lastName String - age Int - - @@map("authors") -} - -model Product { - id String @id @default(cuid()) - properties String - properties_null String? -} - -model Unique { - email String @id -} \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/setup.sh b/query-engine/driver-adapters/js/smoke-test-js/setup.sh deleted file mode 100644 index 7654679db14e..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/setup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -cd .. 
|| return -pnpm i && pnpm build -cargo build -p query-engine-node-api -cd smoke-test-js || exit -pnpm i \ No newline at end of file diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts deleted file mode 100644 index b23cf2d97fb8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/client.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { describe, it } from 'node:test' -import path from 'node:path' -import assert from 'node:assert' -import { PrismaClient } from '@prisma/client' -import type { DriverAdapter } from '@prisma/driver-adapter-utils' -import { getLibQueryEnginePath } from '../libquery/util' - -export async function smokeTestClient(driverAdapter: DriverAdapter) { - const provider = driverAdapter.flavour - - const log = [ - { - emit: 'event', - level: 'query', - } as const, - ] - - const dirname = path.dirname(new URL(import.meta.url).pathname) - process.env.PRISMA_QUERY_ENGINE_LIBRARY = getLibQueryEnginePath(dirname) - - // Run twice, once with adapter and once fully without - for (const adapter of [driverAdapter, null]) { - const isUsingDriverAdapters = adapter !== null - describe(isUsingDriverAdapters ? `using Driver Adapters` : `using Rust drivers`, () => { - - it('expected error (on duplicate insert) as exception thrown / promise rejected', async () => { - const prisma = new PrismaClient({ adapter, log }) - - await assert.rejects( - async () => { - const result = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - const result2 = await prisma.unique.create({ data: { email: 'duplicate@example.com' } }) - }, - (err) => { - assert.match(err.message, /unique/i); - return true; - }, - ); - - }) - - it('batch queries', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([ - prisma.$queryRawUnsafe('SELECT 1'), - prisma.$queryRawUnsafe('SELECT 2'), - prisma.$queryRawUnsafe('SELECT 3'), - ]) - - const defaultExpectedQueries = [ - 'BEGIN', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - 'COMMIT', - ] - - const driverAdapterExpectedQueries = [ - '-- Implicit "BEGIN" query via underlying driver', - 'SELECT 1', - 'SELECT 2', - 'SELECT 3', - '-- Implicit "COMMIT" query via underlying driver', - ] - - // TODO: sqlite should be here too but it's too flaky the way the test is currently written, - // only a subset of logs arrives on time (from 2 to 4 out of 5) - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries, driverAdapterExpectedQueries) - } else { - assert.deepEqual(queries, defaultExpectedQueries) - } - } else if (['postgres'].includes(provider)) { - // Note: the "DEALLOCATE ALL" query is only present after "BEGIN" when using Rust Postgres with pgbouncer. 
- assert.deepEqual(queries.at(0), defaultExpectedQueries.at(0)) - assert.deepEqual( - queries.filter((q) => q !== 'DEALLOCATE ALL'), - defaultExpectedQueries, - ) - } - }) - - if (provider !== 'sqlite') { - it('applies isolation level when using batch $transaction', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }) - - if (['mysql'].includes(provider)) { - if (isUsingDriverAdapters) { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', '-- Implicit "BEGIN" query via underlying driver']) - } else { - assert.deepEqual(queries.slice(0, 2), ['SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'BEGIN']) - } - } else if (['postgres'].includes(provider)) { - assert.deepEqual(queries.slice(0, 2), ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED']) - } - - assert.deepEqual(queries.at(-1), 'COMMIT') - }) - } else { - describe('isolation levels with sqlite', () => { - it('accepts Serializable as a no-op', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const queries: string[] = [] - prisma.$on('query', ({ query }) => queries.push(query)) - - await prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'Serializable', - }) - - console.log("queries", queries) - - if (isUsingDriverAdapters) { - assert.equal(queries.at(0), '-- Implicit "BEGIN" query via underlying driver') - assert.equal(queries.at(-1), '-- Implicit "COMMIT" query via underlying driver') - } else { - assert.equal(queries.at(0), 'BEGIN') - assert.equal(queries.at(-1), 'COMMIT') - } - - assert(!queries.find((q) => q.includes('SET TRANSACTION ISOLATION LEVEL'))) - }) - - it('throws on unsupported isolation levels', async () => { - const prisma = new PrismaClient({ adapter }) - - assert.rejects( - prisma.$transaction([prisma.child.findMany(), prisma.child.count()], { - isolationLevel: 'ReadCommitted', - }), - ) - }) - - }) - - } - - it('bytes type support', async () => { - const prisma = new PrismaClient({ adapter, log }) - - const result = await prisma.type_test_3.create({ - data: { - bytes: Buffer.from([1, 2, 3, 4]), - }, - }) - - assert.deepEqual(result.bytes, Buffer.from([1, 2, 3, 4])) - }) - - }) - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts deleted file mode 100644 index f216b2a02ac7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/libsql.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('libsql with @prisma/client', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - - if (syncUrl) { - await client.sync() - } - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts 
b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts deleted file mode 100644 index 53156ac56249..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.http.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import { neon } from '@neondatabase/serverless' -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { smokeTestClient } from './client' - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const connection = neon(connectionString) - const adapter = new PrismaNeonHTTP(connection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts deleted file mode 100644 index 37b0a9088bb7..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/neon.ws.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { describe } from 'node:test' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { PrismaNeon } from '@prisma/adapter-neon' -import { WebSocket } from 'undici' -import { smokeTestClient } from './client' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon with @prisma/client', async () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts deleted file mode 100644 index 99048ad3d95f..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/pg.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { describe } from 'node:test' -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { smokeTestClient } from './client' - -describe('pg with @prisma/client', async () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts deleted file mode 100644 index 3c22b7aa3062..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/client/planetscale.test.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { describe } from 'node:test' -import { smokeTestClient } from './client' - -describe('planetscale with @prisma/client', async () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? 
'' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - - smokeTestClient(adapter) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts deleted file mode 100644 index bd491db289a3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/JsonProtocol.ts +++ /dev/null @@ -1,78 +0,0 @@ -import * as Transaction from './Transaction' - -export type JsonQuery = { - modelName?: string - action: JsonQueryAction - query: JsonFieldSelection -} - -export type JsonBatchQuery = { - batch: JsonQuery[] - transaction?: { isolationLevel?: Transaction.IsolationLevel } -} - -export type JsonQueryAction = - | 'findUnique' - | 'findUniqueOrThrow' - | 'findFirst' - | 'findFirstOrThrow' - | 'findMany' - | 'createOne' - | 'createMany' - | 'updateOne' - | 'updateMany' - | 'deleteOne' - | 'deleteMany' - | 'upsertOne' - | 'aggregate' - | 'groupBy' - | 'executeRaw' - | 'queryRaw' - | 'runCommandRaw' - | 'findRaw' - | 'aggregateRaw' - -export type JsonFieldSelection = { - arguments?: Record - selection: JsonSelectionSet -} - -export type JsonSelectionSet = { - $scalars?: boolean - $composites?: boolean -} & { - [fieldName: string]: boolean | JsonFieldSelection -} - -export type JsonArgumentValue = - | number - | string - | boolean - | null - | JsonTaggedValue - | JsonArgumentValue[] - | { [key: string]: JsonArgumentValue } - -export type DateTaggedValue = { $type: 'DateTime'; value: string } -export type DecimalTaggedValue = { $type: 'Decimal'; value: string } -export type BytesTaggedValue = { $type: 'Bytes'; value: string } -export type BigIntTaggedValue = { $type: 'BigInt'; value: string } -export type FieldRefTaggedValue = { $type: 'FieldRef'; value: { _ref: string } } -export type EnumTaggedValue = { $type: 'Enum'; value: string } -export type JsonTaggedValue = { $type: 'Json'; value: string } - -export type JsonInputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | FieldRefTaggedValue - | JsonTaggedValue - | EnumTaggedValue - -export type JsonOutputTaggedValue = - | DateTaggedValue - | DecimalTaggedValue - | BytesTaggedValue - | BigIntTaggedValue - | JsonTaggedValue diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts deleted file mode 100644 index a25b3dd26728..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Library.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineConfig } from './QueryEngine' - -export type QueryEngineInstance = { - connect(headers: string): Promise - disconnect(headers: string): Promise - /** - * @param requestStr JSON.stringified `QueryEngineRequest | QueryEngineBatchRequest` - * @param headersStr JSON.stringified `QueryEngineRequestHeaders` - */ - query(requestStr: string, headersStr: string, transactionId?: string): Promise - sdlSchema(): Promise - dmmf(traceparent: string): Promise - startTransaction(options: string, traceHeaders: string): Promise - commitTransaction(id: string, traceHeaders: string): Promise - rollbackTransaction(id: string, traceHeaders: string): Promise - metrics(options: string): Promise -} - -export interface QueryEngineConstructor { - new( - config: QueryEngineConfig, - 
logger: (log: string) => void, - driverAdapter?: ErrorCapturingDriverAdapter, - ): QueryEngineInstance -} - -export interface LibraryLoader { - loadLibrary(): Promise -} - -// Main -export type Library = { - QueryEngine: QueryEngineConstructor - - version: () => { - // The commit hash of the engine - commit: string - // Currently 0.1.0 (Set in Cargo.toml) - version: string - } - /** - * This returns a string representation of `DMMF.Document` - */ - dmmf: (datamodel: string) => Promise -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts deleted file mode 100644 index 5bab74493dee..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/QueryEngine.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { JsonBatchQuery, JsonQuery } from './JsonProtocol' -import * as Transaction from './Transaction' - -// Events -export type QueryEngineEvent = QueryEngineLogEvent | QueryEngineQueryEvent | QueryEnginePanicEvent - -export type QueryEngineLogEvent = { - level: string - module_path: string - message: string - span?: boolean -} - -export type QueryEngineQueryEvent = { - level: 'info' - module_path: string - query: string - item_type: 'query' - params: string - duration_ms: string - result: string -} - -export type QueryEnginePanicEvent = { - level: 'error' - module_path: string - message: 'PANIC' - reason: string - file: string - line: string - column: string -} - -// Configuration -export type QueryEngineLogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'off' - -export type QueryEngineTelemetry = { - enabled: Boolean - endpoint: string -} - -export type GraphQLQuery = { - query: string - variables: object -} - -export type EngineProtocol = 'graphql' | 'json' -export type EngineQuery = GraphQLQuery | JsonQuery - -export type EngineBatchQueries = GraphQLQuery[] | JsonQuery[] - -export type QueryEngineConfig = { - // TODO rename datamodel here and other places - datamodel: string - configDir: string - logQueries: boolean - ignoreEnvVarErrors: boolean - datasourceOverrides?: Record - env: Record - logLevel: QueryEngineLogLevel - telemetry?: QueryEngineTelemetry - engineProtocol: EngineProtocol -} - -// Errors -export type SyncRustError = { - is_panic: boolean - message: string - meta: { - full_error: string - } - error_code: string -} - -export type RustRequestError = { - is_panic: boolean - message: string - backtrace: string -} - -export type QueryEngineResult = { - data: T - elapsed: number -} - -export type QueryEngineBatchRequest = QueryEngineBatchGraphQLRequest | JsonBatchQuery - -export type QueryEngineBatchGraphQLRequest = { - batch: QueryEngineRequest[] - transaction?: boolean - isolationLevel?: Transaction.IsolationLevel -} - -export type QueryEngineRequest = { - query: string - variables: Object -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts b/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts deleted file mode 100644 index 1c5786cc66da..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/engines/types/Transaction.ts +++ /dev/null @@ -1,35 +0,0 @@ -export enum IsolationLevel { - ReadUncommitted = 'ReadUncommitted', - ReadCommitted = 'ReadCommitted', - RepeatableRead = 'RepeatableRead', - Snapshot = 'Snapshot', - Serializable = 'Serializable', -} - -/** - * maxWait ?= 2000 - * timeout ?= 5000 - */ -export type Options = { - maxWait?: number - timeout?: 
number - isolationLevel?: IsolationLevel -} - -export type InteractiveTransactionInfo = { - /** - * Transaction ID returned by the query engine. - */ - id: string - - /** - * Arbitrary payload the meaning of which depends on the `Engine` implementation. - * For example, `DataProxyEngine` needs to associate different API endpoints with transactions. - * In `LibraryEngine` and `BinaryEngine` it is currently not used. - */ - payload: Payload -} - -export type TransactionHeaders = { - traceparent?: string -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts deleted file mode 100644 index 13ac5cd9ec81..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/errors.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { bindAdapter } from '@prisma/driver-adapter-utils' -import test, { after, before, describe } from 'node:test' -import { createQueryFn, initQueryEngine, throwAdapterError } from './util' -import assert from 'node:assert' - -const fakeAdapter = bindAdapter({ - flavour: 'postgres', - startTransaction() { - throw new Error('Error in startTransaction') - }, - - queryRaw() { - throw new Error('Error in queryRaw') - }, - - executeRaw() { - throw new Error('Error in executeRaw') - }, - close() { - return Promise.resolve({ ok: true, value: undefined }) - }, -}) - -const engine = initQueryEngine(fakeAdapter, '../../prisma/postgres/schema.prisma') -const doQuery = createQueryFn(engine, fakeAdapter) - -const startTransaction = async () => { - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const res = JSON.parse(await engine.startTransaction(JSON.stringify(args), '{}')) - if (res['error_code']) { - throwAdapterError(res, fakeAdapter) - } -} - -describe('errors propagation', () => { - before(async () => { - await engine.connect('{}') - }) - after(async () => { - await engine.disconnect('{}') - }) - - test('works for queries', async () => { - await assert.rejects( - doQuery({ - modelName: 'Product', - action: 'findMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }), - /Error in queryRaw/, - ) - }) - - test('works for executeRaw', async () => { - await assert.rejects( - doQuery({ - action: 'executeRaw', - query: { - arguments: { - query: 'SELECT 1', - parameters: '[]', - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in executeRaw/, - ) - }) - - test('works with implicit transaction', async () => { - await assert.rejects( - doQuery({ - modelName: 'User', - action: 'createOne', - query: { - arguments: { - data: { - email: 'user@example.com', - favoriteProduct: { - create: { - properties: {}, - }, - }, - }, - }, - selection: { - $scalars: true, - }, - }, - }), - /Error in startTransaction/, - ) - }) - - test('works with explicit transaction', async () => { - await assert.rejects(startTransaction(), /Error in startTransaction/) - }) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts deleted file mode 100644 index c50ad3e257ab..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ /dev/null @@ -1,722 +0,0 @@ -import { describe, it, before, after } from 'node:test' -import assert from 'node:assert' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import type { QueryEngineInstance } from '../engines/types/Library' 
-import { createQueryFn, initQueryEngine } from './util' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function smokeTestLibquery( - adapter: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, - supportsTransactions = true, -) { - const engine = initQueryEngine(adapter, prismaSchemaRelativePath) - const flavour = adapter.flavour - - const doQuery = createQueryFn(engine, adapter) - - describe('using libquery with Driver Adapters', () => { - before(async () => { - await engine.connect('trace') - }) - - after(async () => { - await engine.disconnect('trace') - await adapter.close() - }) - - it('create JSON values', async () => { - const json = JSON.stringify({ - foo: 'bar', - baz: 1, - }) - - const created = await doQuery({ - action: 'createOne', - modelName: 'Product', - query: { - arguments: { - data: { - properties: json, - properties_null: null, - }, - }, - selection: { - properties: true, - }, - }, - }) - - if (flavour !== 'sqlite') { - assert.strictEqual(created.data.createOneProduct.properties.$type, 'Json') - } - - console.log('[nodejs] created', JSON.stringify(created, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'Product', - query: { - selection: { - id: true, - properties: true, - properties_null: true, - }, - }, - }) - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'Product', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create with autoincrement', async () => { - await doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - }) - - it('create non scalar types', async () => { - const create = await doQuery({ - action: 'createOne', - modelName: 'type_test_2', - query: { - arguments: { - data: {}, - }, - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - }, - }) - - console.log('[nodejs] create', JSON.stringify(create, null, 2)) - - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_2', - query: { - selection: { - id: true, - datetime_column: true, - datetime_column_null: true, - }, - arguments: { - where: {}, - }, - }, - }) - - console.log('[nodejs] resultSet', JSON.stringify(resultSet, null, 2)) - - await doQuery({ - action: 'deleteMany', - modelName: 'type_test_2', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create/delete parent and child', async () => { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) 
AND `cf-users`.`Parent`.`p` = ?)' - // ] - await doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - }) - - it('create explicit transaction', async () => { - if (!supportsTransactions) return - - const args = { isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 } - const startResponse = await engine.startTransaction(JSON.stringify(args), 'trace') - const tx_id = JSON.parse(startResponse).id - console.log('[nodejs] transaction id', tx_id) - assert.notStrictEqual(tx_id, undefined) - - await doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - }) - - it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - await doQuery({ - modelName: 'Unique', - action: 'deleteMany', - query: { - arguments: {}, - selection: { - $scalars: true, - }, - }, - }) - - await doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const promise = doQuery({ - modelName: 'Unique', - action: 'createOne', - query: { - arguments: { - data: { email: 'duplicate@example.com' }, - }, - selection: { - $scalars: true, - }, - }, - }) - - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - }) - - describe('read scalar and non scalar types', () => { - if (['mysql'].includes(flavour)) { - it('mysql', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['postgres'].includes(flavour)) { - it('postgres', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else if (['sqlite'].includes(flavour)) { - it('sqlite', async () => { - const resultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - int_column: true, - bigint_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - text_column: true, - datetime_column: true, - }, - }, - }) - console.log('[nodejs] findMany 
resultSet', JSON.stringify(resultSet, null, 2)) - }) - } else { - throw new Error(`Missing test for flavour ${flavour}`) - } - }) - - it('write and read back bytes', async () => { - const createResultSet = await doQuery({ - action: 'createOne', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - arguments: { - data: { - bytes: { - $type: 'Bytes', - value: 'AQID', - }, - }, - }, - }, - }) - console.log('[nodejs] createOne resultSet:') - console.dir(createResultSet, { depth: Infinity }) - - const findResultSet = await doQuery({ - action: 'findMany', - modelName: 'type_test_3', - query: { - selection: { - bytes: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet:') - console.dir(findResultSet, { depth: Infinity }) - }) - }) -} - -class SmokeTest { - readonly flavour: ErrorCapturingDriverAdapter['flavour'] - - constructor(private readonly engine: QueryEngineInstance, private readonly connector: ErrorCapturingDriverAdapter) { - this.flavour = connector.flavour - } - - async testFindManyTypeTest() { - await this.testFindManyTypeTestMySQL() - await this.testFindManyTypeTestPostgres() - } - - private async testFindManyTypeTestMySQL() { - if (this.flavour !== 'mysql') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - tinyint_column: true, - smallint_column: true, - mediumint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - binary_column: true, - varbinary_column: true, - blob_column: true, - }, - }, - }) - - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - private async testFindManyTypeTestPostgres() { - if (this.flavour !== 'postgres') { - return - } - - const resultSet = await this.doQuery({ - action: 'findMany', - modelName: 'type_test', - query: { - selection: { - smallint_column: true, - int_column: true, - bigint_column: true, - float_column: true, - double_column: true, - decimal_column: true, - boolean_column: true, - char_column: true, - varchar_column: true, - text_column: true, - date_column: true, - time_column: true, - datetime_column: true, - timestamp_column: true, - json_column: true, - enum_column: true, - }, - }, - }) - console.log('[nodejs] findMany resultSet', JSON.stringify(resultSet, null, 2)) - - return resultSet - } - - async createAutoIncrement() { - await this.doQuery({ - modelName: 'Author', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - const author = await this.doQuery({ - modelName: 'Author', - action: 'createOne', - query: { - arguments: { - data: { - firstName: 'Firstname from autoincrement', - lastName: 'Lastname from autoincrement', - age: 99, - }, - }, - selection: { - id: true, - firstName: true, - lastName: true, - }, - }, - }) - console.log('[nodejs] author', JSON.stringify(author, null, 2)) - } - - async testCreateAndDeleteChildParent() { - /* Delete all child and parent records */ - - // Queries: [ - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'SELECT `cf-users`.`Child`.`id` FROM `cf-users`.`Child` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Child` WHERE (`cf-users`.`Child`.`id` IN (?) 
AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Child', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE 1=1', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND 1=1)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: {}, - }, - selection: { - count: true, - }, - }, - }) - - /* Create a parent with some new children, within a transaction */ - - // Queries: [ - // 'INSERT INTO `cf-users`.`Parent` (`p`,`p_1`,`p_2`,`id`) VALUES (?,?,?,?)', - // 'INSERT INTO `cf-users`.`Child` (`c`,`c_1`,`c_2`,`parentId`,`id`) VALUES (?,?,?,?,?)', - // 'SELECT `cf-users`.`Parent`.`id`, `cf-users`.`Parent`.`p` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`id` = ? LIMIT ? OFFSET ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`c`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE `cf-users`.`Child`.`parentId` IN (?)' - // ] - await this.doQuery({ - modelName: 'Parent', - action: 'createOne', - query: { - arguments: { - data: { - p: 'p1', - p_1: '1', - p_2: '2', - childOpt: { - create: { - c: 'c1', - c_1: 'foo', - c_2: 'bar', - }, - }, - }, - }, - selection: { - p: true, - childOpt: { - selection: { - c: true, - }, - }, - }, - }, - }) - - /* Delete the parent */ - - // Queries: [ - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'SELECT `cf-users`.`Child`.`id`, `cf-users`.`Child`.`parentId` FROM `cf-users`.`Child` WHERE (1=1 AND `cf-users`.`Child`.`parentId` IN (?))', - // 'UPDATE `cf-users`.`Child` SET `parentId` = ? WHERE (`cf-users`.`Child`.`id` IN (?) AND 1=1)', - // 'SELECT `cf-users`.`Parent`.`id` FROM `cf-users`.`Parent` WHERE `cf-users`.`Parent`.`p` = ?', - // 'DELETE FROM `cf-users`.`Parent` WHERE (`cf-users`.`Parent`.`id` IN (?) AND `cf-users`.`Parent`.`p` = ?)' - // ] - const resultDeleteMany = await this.doQuery({ - modelName: 'Parent', - action: 'deleteMany', - query: { - arguments: { - where: { - p: 'p1', - }, - }, - selection: { - count: true, - }, - }, - }) - console.log('[nodejs] resultDeleteMany', JSON.stringify(resultDeleteMany, null, 2)) - } - - async testTransaction() { - const startResponse = await this.engine.startTransaction( - JSON.stringify({ isolation_level: 'Serializable', max_wait: 5000, timeout: 15000 }), - 'trace', - ) - - const tx_id = JSON.parse(startResponse).id - - console.log('[nodejs] transaction id', tx_id) - await this.doQuery( - { - action: 'findMany', - modelName: 'Author', - query: { - selection: { $scalars: true }, - }, - }, - tx_id, - ) - - const commitResponse = await this.engine.commitTransaction(tx_id, 'trace') - console.log('[nodejs] commited', commitResponse) - } - - private async doQuery(query: JsonQuery, tx_id?: string) { - const result = await this.engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = this.connector.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } - } - return parsedResult - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts deleted file mode 100644 index 7f0a1038ec74..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libsql.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { IntMode, createClient } from '@libsql/client' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('libsql', async () => { - const url = process.env.JS_LIBSQL_DATABASE_URL as string - const syncUrl = process.env.JS_LIBSQL_SYNC_URL - const authToken = process.env.JS_LIBSQL_AUTH_TOKEN - const intMode = process.env.JS_LIBSQL_INT_MODE as IntMode | undefined - - const client = createClient({ url, syncUrl, authToken, intMode }) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - if (syncUrl) { - await client.sync() - } - - smokeTestLibquery(driverAdapter, '../../prisma/sqlite/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts deleted file mode 100644 index 02872b885fe3..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.http.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { PrismaNeonHTTP } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { neon } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('neon (HTTP)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? '' - - const neonConnection = neon(connectionString) - - const adapter = new PrismaNeonHTTP(neonConnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma', false) -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts deleted file mode 100644 index 54765f5961ba..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/neon.ws.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { PrismaNeon } from '@prisma/adapter-neon' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { WebSocket } from 'undici' -import { Pool, neonConfig } from '@neondatabase/serverless' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -neonConfig.webSocketConstructor = WebSocket - -describe('neon (WebSocket)', () => { - const connectionString = process.env.JS_NEON_DATABASE_URL ?? 
'' - - const pool = new Pool({ connectionString }) - const adapter = new PrismaNeon(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts deleted file mode 100644 index 9b79e7284be8..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/pg.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import pg from 'pg' -import { PrismaPg } from '@prisma/adapter-pg' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('pg', () => { - const connectionString = process.env.JS_PG_DATABASE_URL ?? '' - - const pool = new pg.Pool({ connectionString }) - const adapter = new PrismaPg(pool) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/postgres/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts deleted file mode 100644 index bb7c81805adc..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/planetscale.test.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from '@planetscale/database' -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { describe } from 'node:test' -import { smokeTestLibquery } from './libquery' - -describe('planetscale', () => { - const connectionString = process.env.JS_PLANETSCALE_DATABASE_URL ?? '' - - const connnection = connect({ url: connectionString }) - const adapter = new PrismaPlanetScale(connnection) - const driverAdapter = bindAdapter(adapter) - - smokeTestLibquery(driverAdapter, '../../prisma/mysql/schema.prisma') -}) diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts deleted file mode 100644 index 783eb76759d2..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/util.ts +++ /dev/null @@ -1,71 +0,0 @@ -import path from 'node:path' -import os from 'node:os' -import fs from 'node:fs' -import type { ErrorCapturingDriverAdapter } from '@prisma/driver-adapter-utils' -import { Library, QueryEngineInstance } from '../engines/types/Library' -import { JsonQuery } from '../engines/types/JsonProtocol' - -export function initQueryEngine( - driver: ErrorCapturingDriverAdapter, - prismaSchemaRelativePath: string, -): QueryEngineInstance { - const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = getLibQueryEnginePath(dirname) - - const schemaPath = path.join(dirname, prismaSchemaRelativePath) - - console.log('[nodejs] read Prisma schema from', schemaPath) - - const libqueryEngine = { exports: {} as unknown as Library } - // @ts-ignore - process.dlopen(libqueryEngine, libQueryEnginePath) - - const QueryEngine = libqueryEngine.exports.QueryEngine - - const queryEngineOptions = { - datamodel: fs.readFileSync(schemaPath, 'utf-8'), - configDir: '.', - engineProtocol: 'json' as const, - logLevel: 'info' as const, - logQueries: false, - env: process.env, - ignoreEnvVarErrors: false, - } - - const logCallback = (...args) => { - console.log(args) - } - - const engine = new QueryEngine(queryEngineOptions, 
logCallback, driver) - - return engine -} - -export function getLibQueryEnginePath(dirname: String) { - // I assume nobody will run this on Windows ¯\_(ツ)_/¯ - const libExt = os.platform() === 'darwin' ? 'dylib' : 'so' - return path.join(dirname, `../../../../../../target/debug/libquery_engine.${libExt}`) -} - -export function createQueryFn(engine: QueryEngineInstance, adapter: ErrorCapturingDriverAdapter) { - return async function doQuery(query: JsonQuery, tx_id?: string) { - const result = await engine.query(JSON.stringify(query), 'trace', tx_id) - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - throwAdapterError(parsedResult.errors[0]?.user_facing_error, adapter) - } - return parsedResult - } -} - -export function throwAdapterError(error: any, adapter: ErrorCapturingDriverAdapter) { - if (error.error_code === 'P2036') { - const jsError = adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - throw new Error( - `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`, - ) - } - throw jsError.error - } -} diff --git a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json b/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json deleted file mode 100644 index 3c43903cfdd1..000000000000 --- a/query-engine/driver-adapters/js/smoke-test-js/tsconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../tsconfig.json" -} diff --git a/query-engine/driver-adapters/js/tsconfig.json b/query-engine/driver-adapters/js/tsconfig.json deleted file mode 100644 index b405cea50201..000000000000 --- a/query-engine/driver-adapters/js/tsconfig.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2022", - "module": "ESNext", - "lib": ["ES2022"], - "moduleResolution": "Bundler", - "esModuleInterop": false, - "isolatedModules": true, - "sourceMap": true, - "declaration": true, - "strict": true, - "noImplicitAny": false, - "noUncheckedIndexedAccess": false, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "useUnknownInCatchVariables": false, - "skipDefaultLibCheck": true, - "skipLibCheck": true, - "emitDeclarationOnly": true, - "resolveJsonModule": true - }, - "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] -} diff --git a/query-engine/driver-adapters/js/version.sh b/query-engine/driver-adapters/js/version.sh deleted file mode 100755 index 8f592c0e197c..000000000000 --- a/query-engine/driver-adapters/js/version.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# Usage: `./version.sh x.y.z` will set the `x.y.z` to every package in the monorepo. 
- -target_version=$1 -package_dirs=$(pnpm -r list -r --depth -1 --json | jq -r '.[] | .path' | tail -n +2) - -# Iterate through each package directory -for package_dir in $package_dirs; do - # Check if the directory exists - if [ -d "$package_dir" ]; then - # Set the target version using pnpm - (cd "$package_dir" && pnpm version "$target_version" --no-git-tag-version --allow-same-version) - fi -done diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index c43f66a81e72..53133e037b6f 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -31,7 +31,6 @@ pub struct SqliteErrorDef { #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum DriverAdapterError { /// Unexpected JS exception GenericJs { @@ -64,7 +63,6 @@ impl From for QuaintError { } /// Wrapper for JS-side result type -/// See driver-adapters/js/adapter-utils/src/types.ts file for example pub(crate) enum JsResult where T: FromNapiValue, From 86b585ea21f6166369035c6af99d8127b1699560 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 16:48:48 +0200 Subject: [PATCH 02/26] Remove node_modules from connector-test-kit-executor --- .../connector-test-kit-executor/node_modules/@libsql/client | 1 - .../node_modules/@neondatabase/serverless | 1 - .../node_modules/@planetscale/database | 1 - .../node_modules/@prisma/adapter-libsql | 1 - .../node_modules/@prisma/adapter-neon | 1 - .../connector-test-kit-executor/node_modules/@prisma/adapter-pg | 1 - .../node_modules/@prisma/adapter-planetscale | 1 - .../node_modules/@prisma/driver-adapter-utils | 1 - .../connector-test-kit-executor/node_modules/@types/pg | 1 - .../driver-adapters/connector-test-kit-executor/node_modules/pg | 1 - .../connector-test-kit-executor/node_modules/undici | 1 - 11 files changed, 11 deletions(-) delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg delete mode 120000 query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client deleted file mode 120000 index e005c95cbe57..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@libsql/client +++ /dev/null @@ -1 +0,0 @@ 
-../../../node_modules/.pnpm/@libsql+client@0.3.5/node_modules/@libsql/client \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless deleted file mode 120000 index 1b8b5360d9d7..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@neondatabase/serverless +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/@neondatabase+serverless@0.6.0/node_modules/@neondatabase/serverless \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database deleted file mode 120000 index b7e0d5f2efda..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@planetscale/database +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/@planetscale+database@1.11.0/node_modules/@planetscale/database \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql deleted file mode 120000 index 2f6708d276ca..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-libsql +++ /dev/null @@ -1 +0,0 @@ -../../../adapter-libsql \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon deleted file mode 120000 index f2ca2a7c3fcf..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-neon +++ /dev/null @@ -1 +0,0 @@ -../../../adapter-neon \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg deleted file mode 120000 index d152ffc620d4..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-pg +++ /dev/null @@ -1 +0,0 @@ -../../../adapter-pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale deleted file mode 120000 index 936e67a0c767..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/adapter-planetscale +++ /dev/null @@ -1 +0,0 @@ -../../../adapter-planetscale \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils deleted file mode 120000 index 043d62ea22f9..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@prisma/driver-adapter-utils +++ /dev/null @@ -1 +0,0 @@ -../../../driver-adapter-utils \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg deleted file mode 120000 index 
59ded2fc841a..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/@types/pg +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/@types+pg@8.10.2/node_modules/@types/pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg deleted file mode 120000 index 5853d2642341..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/pg +++ /dev/null @@ -1 +0,0 @@ -../../node_modules/.pnpm/pg@8.11.3/node_modules/pg \ No newline at end of file diff --git a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici b/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici deleted file mode 120000 index f0e268871dfc..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/node_modules/undici +++ /dev/null @@ -1 +0,0 @@ -../../node_modules/.pnpm/undici@5.23.0/node_modules/undici \ No newline at end of file From a93c6f7229455a2d93ad41b21a1cb9765617513b Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 16:49:37 +0200 Subject: [PATCH 03/26] Remove dist from connector-test-kit-executor --- .../connector-test-kit-executor/.gitignore | 0 .../dist/index.d.mts | 2 - .../dist/index.d.ts | 2 - .../connector-test-kit-executor/dist/index.js | 250 ------------------ .../dist/index.mjs | 225 ---------------- 5 files changed, 479 deletions(-) create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/.gitignore delete mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts delete mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts delete mode 100644 query-engine/driver-adapters/connector-test-kit-executor/dist/index.js delete mode 100755 query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts deleted file mode 100644 index c9247d453553..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.mts +++ /dev/null @@ -1,2 +0,0 @@ - -export { } diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts deleted file mode 100644 index c9247d453553..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ - -export { } diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js deleted file mode 100644 index 14278e014d51..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.js +++ /dev/null @@ -1,250 +0,0 @@ -"use strict"; -var __create = Object.create; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __copyProps = (to, from, except, desc) => { - if (from && typeof 
from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( - // If the importer is in node compatibility mode or this is not an ESM - // file that has been converted to a CommonJS file using a Babel- - // compatible transform (i.e. "__esModule" has not been set), then set - // "default" to the CommonJS "module.exports" for node compatibility. - isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, - mod -)); - -// src/qe.ts -var os = __toESM(require("os")); -var path = __toESM(require("path")); -var import_meta = {}; -function initQueryEngine(adapter, datamodel, queryLogCallback, debug2) { - const libExt = os.platform() === "darwin" ? "dylib" : "so"; - const dirname2 = path.dirname(new URL(import_meta.url).pathname); - const libQueryEnginePath = path.join(dirname2, `../../../../../target/debug/libquery_engine.${libExt}`); - const libqueryEngine = { exports: {} }; - process.dlopen(libqueryEngine, libQueryEnginePath); - const QueryEngine = libqueryEngine.exports.QueryEngine; - const queryEngineOptions = { - datamodel, - configDir: ".", - engineProtocol: "json", - logLevel: process.env["RUST_LOG"] ?? "info", - logQueries: true, - env: process.env, - ignoreEnvVarErrors: false - }; - const logCallback = (event) => { - const parsed = JSON.parse(event); - if (parsed.is_query) { - queryLogCallback(parsed.query); - } - debug2(parsed); - }; - return new QueryEngine(queryEngineOptions, logCallback, adapter); -} - -// src/index.ts -var readline = __toESM(require("readline")); -var import_pg = __toESM(require("pg")); -var prismaPg = __toESM(require("@prisma/adapter-pg")); -var import_serverless = require("@neondatabase/serverless"); -var import_undici = require("undici"); -var prismaNeon = __toESM(require("@prisma/adapter-neon")); -var import_client = require("@libsql/client"); -var import_adapter_libsql = require("@prisma/adapter-libsql"); -var import_database = require("@planetscale/database"); -var import_adapter_planetscale = require("@prisma/adapter-planetscale"); -var import_driver_adapter_utils = require("@prisma/driver-adapter-utils"); -var SUPPORTED_ADAPTERS = { - "pg": pgAdapter, - "neon:ws": neonWsAdapter, - "libsql": libsqlAdapter, - "planetscale": planetscaleAdapter -}; -var debug = (() => { - if ((process.env.LOG_LEVEL ?? 
"").toLowerCase() != "debug") { - return (...args) => { - }; - } - return (...args) => { - console.error("[nodejs] DEBUG:", ...args); - }; -})(); -var err = (...args) => console.error("[nodejs] ERROR:", ...args); -async function main() { - const iface = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false - }); - iface.on("line", async (line) => { - try { - const request = JSON.parse(line); - debug(`Got a request: ${line}`); - try { - const response = await handleRequest(request.method, request.params); - respondOk(request.id, response); - } catch (err2) { - debug("[nodejs] Error from request handler: ", err2); - respondErr(request.id, { - code: 1, - message: err2.toString() - }); - } - } catch (err2) { - debug("Received non-json line: ", line); - } - }); -} -var state = {}; -async function handleRequest(method, params) { - switch (method) { - case "initializeSchema": { - const castParams = params; - const logs = []; - const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { - logs.push(log); - }); - await engine.connect(""); - state[castParams.schemaId] = { - engine, - adapter, - logs - }; - return null; - } - case "query": { - debug("Got `query`", params); - const castParams = params; - const engine = state[castParams.schemaId].engine; - const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId); - const parsedResult = JSON.parse(result); - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error; - if (error.error_code === "P2036") { - const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id); - if (!jsError) { - err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`); - } else { - err("got error response from the engine caused by the driver: ", jsError); - } - } - } - debug("got response from engine: ", result); - return result; - } - case "startTx": { - debug("Got `startTx", params); - const { schemaId, options } = params; - const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), ""); - return JSON.parse(result); - } - case "commitTx": { - debug("Got `commitTx", params); - const { schemaId, txId } = params; - const result = await state[schemaId].engine.commitTransaction(txId, "{}"); - return JSON.parse(result); - } - case "rollbackTx": { - debug("Got `rollbackTx", params); - const { schemaId, txId } = params; - const result = await state[schemaId].engine.rollbackTransaction(txId, "{}"); - return JSON.parse(result); - } - case "teardown": { - debug("Got `teardown", params); - const castParams = params; - await state[castParams.schemaId].engine.disconnect(""); - delete state[castParams.schemaId]; - return {}; - } - case "getLogs": { - const castParams = params; - return state[castParams.schemaId].logs; - } - default: { - throw new Error(`Unknown method: \`${method}\``); - } - } -} -function respondErr(requestId, error) { - const msg = { - jsonrpc: "2.0", - id: requestId, - error - }; - console.log(JSON.stringify(msg)); -} -function respondOk(requestId, payload) { - const msg = { - jsonrpc: "2.0", - id: requestId, - result: payload - }; - console.log(JSON.stringify(msg)); -} -async function initQe(url, prismaSchema, logCallback) { - const adapter = await adapterFromEnv(url); - const errorCapturingAdapter = (0, import_driver_adapter_utils.bindAdapter)(adapter); - const engineInstance = initQueryEngine(errorCapturingAdapter, prismaSchema, 
logCallback, debug); - return [engineInstance, errorCapturingAdapter]; -} -async function adapterFromEnv(url) { - const adapter = process.env.DRIVER_ADAPTER ?? ""; - if (adapter == "") { - throw new Error("DRIVER_ADAPTER is not defined or empty."); - } - if (!(adapter in SUPPORTED_ADAPTERS)) { - throw new Error(`Unsupported driver adapter: ${adapter}`); - } - return await SUPPORTED_ADAPTERS[adapter](url); -} -function postgres_options(url) { - let args = { connectionString: url }; - const schemaName = new URL(url).searchParams.get("schema"); - if (schemaName != null) { - args.options = `--search_path="${schemaName}"`; - } - return args; -} -async function pgAdapter(url) { - const pool = new import_pg.default.Pool(postgres_options(url)); - return new prismaPg.PrismaPg(pool); -} -async function neonWsAdapter(url) { - const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; - if (proxyURL == "") { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); - } - import_serverless.neonConfig.wsProxy = () => proxyURL; - import_serverless.neonConfig.webSocketConstructor = import_undici.WebSocket; - import_serverless.neonConfig.useSecureWebSocket = false; - import_serverless.neonConfig.pipelineConnect = false; - const pool = new import_serverless.Pool(postgres_options(url)); - return new prismaNeon.PrismaNeon(pool); -} -async function libsqlAdapter(url) { - const libsql = (0, import_client.createClient)({ url, intMode: "bigint" }); - return new import_adapter_libsql.PrismaLibSQL(libsql); -} -async function planetscaleAdapter(url) { - const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; - if (proxyURL == "") { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); - } - const connection = (0, import_database.connect)({ - url: proxyURL, - fetch: import_undici.fetch - }); - return new import_adapter_planetscale.PrismaPlanetScale(connection); -} -main().catch(err); diff --git a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs b/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs deleted file mode 100755 index 72a403d687a2..000000000000 --- a/query-engine/driver-adapters/connector-test-kit-executor/dist/index.mjs +++ /dev/null @@ -1,225 +0,0 @@ -// src/qe.ts -import * as os from "os"; -import * as path from "path"; -function initQueryEngine(adapter, datamodel, queryLogCallback, debug2) { - const libExt = os.platform() === "darwin" ? "dylib" : "so"; - const dirname2 = path.dirname(new URL(import.meta.url).pathname); - const libQueryEnginePath = path.join(dirname2, `../../../../../target/debug/libquery_engine.${libExt}`); - const libqueryEngine = { exports: {} }; - process.dlopen(libqueryEngine, libQueryEnginePath); - const QueryEngine = libqueryEngine.exports.QueryEngine; - const queryEngineOptions = { - datamodel, - configDir: ".", - engineProtocol: "json", - logLevel: process.env["RUST_LOG"] ?? 
"info", - logQueries: true, - env: process.env, - ignoreEnvVarErrors: false - }; - const logCallback = (event) => { - const parsed = JSON.parse(event); - if (parsed.is_query) { - queryLogCallback(parsed.query); - } - debug2(parsed); - }; - return new QueryEngine(queryEngineOptions, logCallback, adapter); -} - -// src/index.ts -import * as readline from "readline"; -import pgDriver from "pg"; -import * as prismaPg from "@prisma/adapter-pg"; -import { Pool as NeonPool, neonConfig } from "@neondatabase/serverless"; -import { fetch, WebSocket } from "undici"; -import * as prismaNeon from "@prisma/adapter-neon"; -import { createClient } from "@libsql/client"; -import { PrismaLibSQL } from "@prisma/adapter-libsql"; -import { connect as planetscaleConnect } from "@planetscale/database"; -import { PrismaPlanetScale } from "@prisma/adapter-planetscale"; -import { bindAdapter } from "@prisma/driver-adapter-utils"; -var SUPPORTED_ADAPTERS = { - "pg": pgAdapter, - "neon:ws": neonWsAdapter, - "libsql": libsqlAdapter, - "planetscale": planetscaleAdapter -}; -var debug = (() => { - if ((process.env.LOG_LEVEL ?? "").toLowerCase() != "debug") { - return (...args) => { - }; - } - return (...args) => { - console.error("[nodejs] DEBUG:", ...args); - }; -})(); -var err = (...args) => console.error("[nodejs] ERROR:", ...args); -async function main() { - const iface = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false - }); - iface.on("line", async (line) => { - try { - const request = JSON.parse(line); - debug(`Got a request: ${line}`); - try { - const response = await handleRequest(request.method, request.params); - respondOk(request.id, response); - } catch (err2) { - debug("[nodejs] Error from request handler: ", err2); - respondErr(request.id, { - code: 1, - message: err2.toString() - }); - } - } catch (err2) { - debug("Received non-json line: ", line); - } - }); -} -var state = {}; -async function handleRequest(method, params) { - switch (method) { - case "initializeSchema": { - const castParams = params; - const logs = []; - const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { - logs.push(log); - }); - await engine.connect(""); - state[castParams.schemaId] = { - engine, - adapter, - logs - }; - return null; - } - case "query": { - debug("Got `query`", params); - const castParams = params; - const engine = state[castParams.schemaId].engine; - const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId); - const parsedResult = JSON.parse(result); - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error; - if (error.error_code === "P2036") { - const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id); - if (!jsError) { - err(`Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`); - } else { - err("got error response from the engine caused by the driver: ", jsError); - } - } - } - debug("got response from engine: ", result); - return result; - } - case "startTx": { - debug("Got `startTx", params); - const { schemaId, options } = params; - const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), ""); - return JSON.parse(result); - } - case "commitTx": { - debug("Got `commitTx", params); - const { schemaId, txId } = params; - const result = await state[schemaId].engine.commitTransaction(txId, "{}"); - return JSON.parse(result); - } - case "rollbackTx": { - debug("Got `rollbackTx", params); - const { schemaId, txId } = params; - const result = await state[schemaId].engine.rollbackTransaction(txId, "{}"); - return JSON.parse(result); - } - case "teardown": { - debug("Got `teardown", params); - const castParams = params; - await state[castParams.schemaId].engine.disconnect(""); - delete state[castParams.schemaId]; - return {}; - } - case "getLogs": { - const castParams = params; - return state[castParams.schemaId].logs; - } - default: { - throw new Error(`Unknown method: \`${method}\``); - } - } -} -function respondErr(requestId, error) { - const msg = { - jsonrpc: "2.0", - id: requestId, - error - }; - console.log(JSON.stringify(msg)); -} -function respondOk(requestId, payload) { - const msg = { - jsonrpc: "2.0", - id: requestId, - result: payload - }; - console.log(JSON.stringify(msg)); -} -async function initQe(url, prismaSchema, logCallback) { - const adapter = await adapterFromEnv(url); - const errorCapturingAdapter = bindAdapter(adapter); - const engineInstance = initQueryEngine(errorCapturingAdapter, prismaSchema, logCallback, debug); - return [engineInstance, errorCapturingAdapter]; -} -async function adapterFromEnv(url) { - const adapter = process.env.DRIVER_ADAPTER ?? ""; - if (adapter == "") { - throw new Error("DRIVER_ADAPTER is not defined or empty."); - } - if (!(adapter in SUPPORTED_ADAPTERS)) { - throw new Error(`Unsupported driver adapter: ${adapter}`); - } - return await SUPPORTED_ADAPTERS[adapter](url); -} -function postgres_options(url) { - let args = { connectionString: url }; - const schemaName = new URL(url).searchParams.get("schema"); - if (schemaName != null) { - args.options = `--search_path="${schemaName}"`; - } - return args; -} -async function pgAdapter(url) { - const pool = new pgDriver.Pool(postgres_options(url)); - return new prismaPg.PrismaPg(pool); -} -async function neonWsAdapter(url) { - const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? ""; - if (proxyURL == "") { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); - } - neonConfig.wsProxy = () => proxyURL; - neonConfig.webSocketConstructor = WebSocket; - neonConfig.useSecureWebSocket = false; - neonConfig.pipelineConnect = false; - const pool = new NeonPool(postgres_options(url)); - return new prismaNeon.PrismaNeon(pool); -} -async function libsqlAdapter(url) { - const libsql = createClient({ url, intMode: "bigint" }); - return new PrismaLibSQL(libsql); -} -async function planetscaleAdapter(url) { - const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || "{}").proxyUrl ?? 
""; - if (proxyURL == "") { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); - } - const connection = planetscaleConnect({ - url: proxyURL, - fetch - }); - return new PrismaPlanetScale(connection); -} -main().catch(err); From 4e6db6e8ebb67a5dde619f00704e27a97caa3eb9 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 16:51:12 +0200 Subject: [PATCH 04/26] Ignore non-relevant files --- .gitignore | 3 +++ query-engine/driver-adapters/.gitignore | 3 +++ .../driver-adapters/connector-test-kit-executor/.gitignore | 3 +++ 3 files changed, 9 insertions(+) create mode 100644 query-engine/driver-adapters/.gitignore diff --git a/.gitignore b/.gitignore index 43e03e31867d..be185b0f7afc 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,6 @@ dmmf.json graph.dot prisma-schema-wasm/nodejs + +# This symlink looks orphan here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory +tsconfig.build.adapter.json diff --git a/query-engine/driver-adapters/.gitignore b/query-engine/driver-adapters/.gitignore new file mode 100644 index 000000000000..dab5c8905550 --- /dev/null +++ b/query-engine/driver-adapters/.gitignore @@ -0,0 +1,3 @@ +node_modules +adapter-* +driver-adapter-utils diff --git a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore index e69de29bb2d1..37b61ff565c7 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/.gitignore +++ b/query-engine/driver-adapters/connector-test-kit-executor/.gitignore @@ -0,0 +1,3 @@ +node_modules +pnpm-debug.log +dist/ From d3d9ad43d90acfa56ebcf8ac9216d189d9609b8a Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 16:52:58 +0200 Subject: [PATCH 05/26] Sort out dependencies --- .../connector-test-kit-executor/package.json | 10 +- .../connector-test-kit-executor/tsconfig.json | 23 + query-engine/driver-adapters/package.json | 23 + query-engine/driver-adapters/pnpm-lock.yaml | 1425 +++++++++++++++++ .../driver-adapters/pnpm-workspace.yaml | 4 + 5 files changed, 1480 insertions(+), 5 deletions(-) create mode 100644 query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json create mode 100644 query-engine/driver-adapters/package.json create mode 100644 query-engine/driver-adapters/pnpm-lock.yaml create mode 100644 query-engine/driver-adapters/pnpm-workspace.yaml diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index 2a0d16bd4ccf..3fb965afd916 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -16,11 +16,11 @@ "@libsql/client": "0.3.5", "@neondatabase/serverless": "^0.6.0", "@planetscale/database": "1.11.0", - "@prisma/adapter-libsql": "workspace:*", - "@prisma/adapter-neon": "workspace:*", - "@prisma/adapter-pg": "workspace:*", - "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", + "@prisma/adapter-libsql": "../adapter-libsql", + "@prisma/adapter-neon": "../adapter-neon", + "@prisma/adapter-pg": "../adapter-pg", + "@prisma/adapter-planetscale": "../adapter-planetscale", + "@prisma/driver-adapter-utils": "../driver-adapter-utils", "@types/pg": "^8.10.2", "pg": "^8.11.3", "undici": "^5.26.2" diff --git 
a/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json new file mode 100644 index 000000000000..516c114b3e15 --- /dev/null +++ b/query-engine/driver-adapters/connector-test-kit-executor/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "lib": ["ES2022"], + "moduleResolution": "Bundler", + "esModuleInterop": false, + "isolatedModules": true, + "sourceMap": true, + "declaration": true, + "strict": true, + "noImplicitAny": false, + "noUncheckedIndexedAccess": false, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "useUnknownInCatchVariables": false, + "skipDefaultLibCheck": true, + "skipLibCheck": true, + "emitDeclarationOnly": true, + "resolveJsonModule": true + }, + "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"] +} \ No newline at end of file diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json new file mode 100644 index 000000000000..6b8a756bc6ee --- /dev/null +++ b/query-engine/driver-adapters/package.json @@ -0,0 +1,23 @@ +{ + "private": true, + "name": "js", + "version": "0.0.2", + "description": "", + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, + "license": "Apache-2.0", + "scripts": { + "build": "pnpm -r run build", + "lint": "pnpm -r run lint" + }, + "keywords": [], + "author": "", + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "tsx": "^3.12.7", + "typescript": "^5.1.6" + } +} \ No newline at end of file diff --git a/query-engine/driver-adapters/pnpm-lock.yaml b/query-engine/driver-adapters/pnpm-lock.yaml new file mode 100644 index 000000000000..a829827c2a8c --- /dev/null +++ b/query-engine/driver-adapters/pnpm-lock.yaml @@ -0,0 +1,1425 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 + + connector-test-kit-executor: + dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.2 + version: 5.26.2 + +packages: + + /@esbuild-kit/cjs-loader@2.4.4: + resolution: {integrity: sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg==} + dependencies: + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 + dev: true + + /@esbuild-kit/core-utils@3.3.2: + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} + dependencies: + esbuild: 0.18.20 + source-map-support: 
0.5.21 + dev: true + + /@esbuild-kit/esm-loader@2.6.5: + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} + dependencies: + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.2 + dev: true + + /@esbuild/android-arm64@0.18.20: + resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/android-arm@0.18.20: + resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/android-x64@0.18.20: + resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/darwin-arm64@0.18.20: + resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@esbuild/darwin-x64@0.18.20: + resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@esbuild/freebsd-arm64@0.18.20: + resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/freebsd-x64@0.18.20: + resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-arm64@0.18.20: + resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-arm@0.18.20: + resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-ia32@0.18.20: + resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-loong64@0.18.20: + resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-mips64el@0.18.20: + resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-ppc64@0.18.20: + resolution: {integrity: 
sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-riscv64@0.18.20: + resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-s390x@0.18.20: + resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-x64@0.18.20: + resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@esbuild/netbsd-x64@0.18.20: + resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/openbsd-x64@0.18.20: + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + requiresBuild: true + dev: true + optional: true + + /@esbuild/sunos-x64@0.18.20: + resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-arm64@0.18.20: + resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-ia32@0.18.20: + resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@esbuild/win32-x64@0.18.20: + resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@fastify/busboy@2.0.0: + resolution: {integrity: sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==} + engines: {node: '>=14'} + dev: false + + /@jridgewell/gen-mapping@0.3.3: + resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping': 0.3.20 + dev: true + + /@jridgewell/resolve-uri@3.1.1: + resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array@1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/sourcemap-codec@1.4.15: + resolution: {integrity: 
sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + dev: true + + /@jridgewell/trace-mapping@0.3.20: + resolution: {integrity: sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==} + dependencies: + '@jridgewell/resolve-uri': 3.1.1 + '@jridgewell/sourcemap-codec': 1.4.15 + dev: true + + /@libsql/client@0.3.5: + resolution: {integrity: sha512-4fZxGh0qKW5dtp1yuQLRvRAtbt02V4jzjM9sHSmz5k25xZTLg7/GlNudKdqKZrjJXEV5PvDNsczupBtedZZovw==} + dependencies: + '@libsql/hrana-client': 0.5.5 + js-base64: 3.7.5 + libsql: 0.1.34 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/darwin-arm64@0.1.34: + resolution: {integrity: sha512-Wv8jvkj/fUAO8DF3A4HaddCMldUUpKcg/WW1sY95FNsSHOxktyxqU80jAp/tCuZ85GQIJozvgSr51/ARIC0gsw==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/darwin-x64@0.1.34: + resolution: {integrity: sha512-2NQXD9nUzC08hg7FdcZLq5uTEwGz1KbD7YvUzQb/psO1lO/E/p83wl1es1082+Pp0z5pSPDWQeRTuccD41L+3w==} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/hrana-client@0.5.5: + resolution: {integrity: sha512-i+hDBpiV719poqEiHupUUZYKJ9YSbCRFe5Q2PQ0v3mHIftePH6gayLjp2u6TXbqbO/Dv6y8yyvYlBXf/kFfRZA==} + dependencies: + '@libsql/isomorphic-fetch': 0.1.10 + '@libsql/isomorphic-ws': 0.1.5 + js-base64: 3.7.5 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/isomorphic-fetch@0.1.10: + resolution: {integrity: sha512-dH0lMk50gKSvEKD78xWMu60SY1sjp1sY//iFLO0XMmBwfVfG136P9KOk06R4maBdlb8KMXOzJ1D28FR5ZKnHTA==} + dependencies: + '@types/node-fetch': 2.6.7 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + dev: false + + /@libsql/isomorphic-ws@0.1.5: + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + dependencies: + '@types/ws': 8.5.8 + ws: 8.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: false + + /@libsql/linux-arm64-gnu@0.1.34: + resolution: {integrity: sha512-r3dY1FDYZ7eX5HX7HyAoYSqK5FPugj5NSB5Bt/nz+ygBWdXASgSKxkE/RqjJIM59vXwv300iJX9qhR5fXv8sTw==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.1.34: + resolution: {integrity: sha512-9AE/eNb9eQRcNsLxqtpLJxVEoIMmItrdwqJDImPJtOp10rhp4U0x/9RGKerl9Mg3ObVj676pyhAR2KzyudrOfQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-gnu@0.1.34: + resolution: {integrity: sha512-o8toY1Txstjt13fBhZbFe8sNAW6OaS6qVcp1Bd6bHkCLSBLZ6pjJmwzQN8rFv9QFBPAnaKP3lI4vaOXXw7huTA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-musl@0.1.34: + resolution: {integrity: sha512-EldEmcAxxNPSCjJ73oFxg81PDDIpDbPqK/QOrhmmGYLvYwrnQtVRUIbARf80JQvcy6bCxOO/Q9dh6wGhnyHyYA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/win32-x64-msvc@0.1.34: + resolution: {integrity: sha512-jnv0qfVMnrVv00r+wUOe6DHrHuao9y1w1lN543cV2J1JdQNJT/eSZzhyZFSlS3T2ZUvXfZfZ5GeL8U18IAID6w==} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@neon-rs/load@0.0.4: + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + dev: false + + /@neondatabase/serverless@0.6.0: + resolution: {integrity: 
sha512-qXxBRYN0m2v8kVQBfMxbzNGn2xFAhTXFibzQlE++NfJ56Shz3m7+MyBBtXDlEH+3Wfa6lToDXf1MElocY4sJ3w==} + dependencies: + '@types/pg': 8.6.6 + dev: false + + /@nodelib/fs.scandir@2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + dev: true + + /@nodelib/fs.stat@2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + dev: true + + /@nodelib/fs.walk@1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.15.0 + dev: true + + /@planetscale/database@1.11.0: + resolution: {integrity: sha512-aWbU+D/IRHoDE9975y+Q4c+EwwAWxCPwFId+N1AhQVFXzbeJMkj6KN2iQtoi03elcLMRdfT+V3i9Z4WRw+/oIA==} + engines: {node: '>=16'} + dev: false + + /@types/node-fetch@2.6.7: + resolution: {integrity: sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==} + dependencies: + '@types/node': 20.5.1 + form-data: 4.0.0 + dev: false + + /@types/node@20.5.1: + resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} + + /@types/pg@8.10.2: + resolution: {integrity: sha512-MKFs9P6nJ+LAeHLU3V0cODEOgyThJ3OAnmOlsZsxux6sfQs3HRXR5bBn7xG5DjckEFhTAxsXi7k7cd0pCMxpJw==} + dependencies: + '@types/node': 20.5.1 + pg-protocol: 1.6.0 + pg-types: 4.0.1 + dev: false + + /@types/pg@8.6.6: + resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} + dependencies: + '@types/node': 20.5.1 + pg-protocol: 1.6.0 + pg-types: 2.2.0 + dev: false + + /@types/ws@8.5.8: + resolution: {integrity: sha512-flUksGIQCnJd6sZ1l5dqCEG/ksaoAg/eUwiLAGTJQcfgvZJKF++Ta4bJA6A5aPSJmsr+xlseHn4KLgVlNnvPTg==} + dependencies: + '@types/node': 20.5.1 + dev: false + + /any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + dev: true + + /anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + dev: true + + /array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + dev: true + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /binary-extensions@2.2.0: + resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + engines: {node: '>=8'} + dev: true + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + 
engines: {node: '>=8'} + dependencies: + fill-range: 7.0.1 + dev: true + + /buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true + + /buffer-writer@2.0.0: + resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} + engines: {node: '>=4'} + dev: false + + /bundle-require@4.0.2(esbuild@0.18.20): + resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.17' + dependencies: + esbuild: 0.18.20 + load-tsconfig: 0.2.5 + dev: true + + /cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + dev: true + + /chokidar@3.5.3: + resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + engines: {node: '>= 8.10.0'} + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + dev: true + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + dev: true + + /concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + dev: true + + /cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + dev: false + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: true + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + /detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + dev: false + + /dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + dependencies: + path-type: 4.0.0 + dev: true + + /esbuild@0.18.20: + resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + optionalDependencies: + '@esbuild/android-arm': 0.18.20 + '@esbuild/android-arm64': 0.18.20 + 
'@esbuild/android-x64': 0.18.20 + '@esbuild/darwin-arm64': 0.18.20 + '@esbuild/darwin-x64': 0.18.20 + '@esbuild/freebsd-arm64': 0.18.20 + '@esbuild/freebsd-x64': 0.18.20 + '@esbuild/linux-arm': 0.18.20 + '@esbuild/linux-arm64': 0.18.20 + '@esbuild/linux-ia32': 0.18.20 + '@esbuild/linux-loong64': 0.18.20 + '@esbuild/linux-mips64el': 0.18.20 + '@esbuild/linux-ppc64': 0.18.20 + '@esbuild/linux-riscv64': 0.18.20 + '@esbuild/linux-s390x': 0.18.20 + '@esbuild/linux-x64': 0.18.20 + '@esbuild/netbsd-x64': 0.18.20 + '@esbuild/openbsd-x64': 0.18.20 + '@esbuild/sunos-x64': 0.18.20 + '@esbuild/win32-arm64': 0.18.20 + '@esbuild/win32-ia32': 0.18.20 + '@esbuild/win32-x64': 0.18.20 + dev: true + + /execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /fast-glob@3.3.1: + resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: true + + /fastq@1.15.0: + resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} + dependencies: + reusify: 1.0.4 + dev: true + + /fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.2.1 + dev: false + + /fill-range@7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + dev: true + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + dependencies: + fetch-blob: 3.2.0 + dev: false + + /fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: true + + /fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + dev: true + + /get-tsconfig@4.7.2: + resolution: {integrity: sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==} + dependencies: + resolve-pkg-maps: 1.0.0 + dev: true + + /glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + 
dependencies: + is-glob: 4.0.3 + dev: true + + /glob@7.1.6: + resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.1 + ignore: 5.2.4 + merge2: 1.4.1 + slash: 3.0.0 + dev: true + + /human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + dev: true + + /ignore@5.2.4: + resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} + engines: {node: '>= 4'} + dev: true + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: true + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: true + + /is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + dependencies: + binary-extensions: 2.2.0 + dev: true + + /is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + dev: true + + /is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + dependencies: + is-extglob: 2.1.1 + dev: true + + /is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + dev: true + + /is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + dev: true + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: true + + /joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + dev: true + + /js-base64@3.7.5: + resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false + + /libsql@0.1.34: + resolution: {integrity: sha512-LGofp7z7gi1Td6vu2GxaA4WyvSPEkuFn0f/ePSti1TsAlBU0LWxdk+bj9D8nqswzxiqe5wpAyTLhVzTIYSyXEA==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.1.34 + '@libsql/darwin-x64': 0.1.34 + '@libsql/linux-arm64-gnu': 0.1.34 + '@libsql/linux-arm64-musl': 0.1.34 + '@libsql/linux-x64-gnu': 0.1.34 + '@libsql/linux-x64-musl': 0.1.34 + '@libsql/win32-x64-msvc': 0.1.34 + dev: false + + /lilconfig@2.1.0: + resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} + engines: {node: 
'>=10'} + dev: true + + /lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + dev: true + + /load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dev: true + + /lodash.sortby@4.7.0: + resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} + dev: true + + /merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + dev: true + + /merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + dev: true + + /micromatch@4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + dev: true + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true + + /mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + dev: true + + /node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + dev: false + + /node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + + /node-fetch@3.3.2: + resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + dev: false + + /normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + dev: true + + /npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + dependencies: + path-key: 3.1.1 + dev: true + + 
/object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + dev: true + + /obuf@1.1.2: + resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + dev: false + + /once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + dev: true + + /onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + dependencies: + mimic-fn: 2.1.0 + dev: true + + /packet-reader@1.0.0: + resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} + dev: false + + /path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + dev: true + + /path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: true + + /path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + dev: true + + /pg-cloudflare@1.1.1: + resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} + requiresBuild: true + dev: false + optional: true + + /pg-connection-string@2.6.2: + resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} + dev: false + + /pg-int8@1.0.1: + resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} + engines: {node: '>=4.0.0'} + dev: false + + /pg-numeric@1.0.2: + resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} + engines: {node: '>=4'} + dev: false + + /pg-pool@3.6.1(pg@8.11.3): + resolution: {integrity: sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==} + peerDependencies: + pg: '>=8.0' + dependencies: + pg: 8.11.3 + dev: false + + /pg-protocol@1.6.0: + resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} + dev: false + + /pg-types@2.2.0: + resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} + engines: {node: '>=4'} + dependencies: + pg-int8: 1.0.1 + postgres-array: 2.0.0 + postgres-bytea: 1.0.0 + postgres-date: 1.0.7 + postgres-interval: 1.2.0 + dev: false + + /pg-types@4.0.1: + resolution: {integrity: sha512-hRCSDuLII9/LE3smys1hRHcu5QGcLs9ggT7I/TCs0IE+2Eesxi9+9RWAAwZ0yaGjxoWICF/YHLOEjydGujoJ+g==} + engines: {node: '>=10'} + dependencies: + pg-int8: 1.0.1 + pg-numeric: 1.0.2 + postgres-array: 3.0.2 + postgres-bytea: 3.0.0 + postgres-date: 2.0.1 + postgres-interval: 3.0.0 + postgres-range: 1.1.3 + dev: false + + /pg@8.11.3: + resolution: {integrity: sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==} + engines: {node: '>= 8.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + dependencies: + 
buffer-writer: 2.0.0 + packet-reader: 1.0.0 + pg-connection-string: 2.6.2 + pg-pool: 3.6.1(pg@8.11.3) + pg-protocol: 1.6.0 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.1.1 + dev: false + + /pgpass@1.0.5: + resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} + dependencies: + split2: 4.2.0 + dev: false + + /picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: true + + /pirates@4.0.6: + resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==} + engines: {node: '>= 6'} + dev: true + + /postcss-load-config@4.0.1: + resolution: {integrity: sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==} + engines: {node: '>= 14'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + dependencies: + lilconfig: 2.1.0 + yaml: 2.3.3 + dev: true + + /postgres-array@2.0.0: + resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} + engines: {node: '>=4'} + dev: false + + /postgres-array@3.0.2: + resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} + engines: {node: '>=12'} + dev: false + + /postgres-bytea@1.0.0: + resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} + engines: {node: '>=0.10.0'} + dev: false + + /postgres-bytea@3.0.0: + resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} + engines: {node: '>= 6'} + dependencies: + obuf: 1.1.2 + dev: false + + /postgres-date@1.0.7: + resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} + engines: {node: '>=0.10.0'} + dev: false + + /postgres-date@2.0.1: + resolution: {integrity: sha512-YtMKdsDt5Ojv1wQRvUhnyDJNSr2dGIC96mQVKz7xufp07nfuFONzdaowrMHjlAzY6GDLd4f+LUHHAAM1h4MdUw==} + engines: {node: '>=12'} + dev: false + + /postgres-interval@1.2.0: + resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} + engines: {node: '>=0.10.0'} + dependencies: + xtend: 4.0.2 + dev: false + + /postgres-interval@3.0.0: + resolution: {integrity: sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} + engines: {node: '>=12'} + dev: false + + /postgres-range@1.1.3: + resolution: {integrity: sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g==} + dev: false + + /punycode@2.3.0: + resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} + engines: {node: '>=6'} + dev: true + + /queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + dev: true + + /readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + dev: true + + /resolve-from@5.0.0: + resolution: {integrity: 
sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + dev: true + + /resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + dev: true + + /reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + dev: true + + /rollup@3.29.4: + resolution: {integrity: sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==} + engines: {node: '>=14.18.0', npm: '>=8.0.0'} + hasBin: true + optionalDependencies: + fsevents: 2.3.3 + dev: true + + /run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + dependencies: + queue-microtask: 1.2.3 + dev: true + + /shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: true + + /shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: true + + /signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + dev: true + + /slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + dev: true + + /source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + dev: true + + /source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + dev: true + + /source-map@0.8.0-beta.0: + resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} + engines: {node: '>= 8'} + dependencies: + whatwg-url: 7.1.0 + dev: true + + /split2@4.2.0: + resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} + engines: {node: '>= 10.x'} + dev: false + + /strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + dev: true + + /sucrase@3.34.0: + resolution: {integrity: sha512-70/LQEZ07TEcxiU2dz51FKaE6hCTWC6vr7FOk3Gr0U60C3shtAN+H+BFr9XlYe5xqf3RA8nrc+VIwzCfnxuXJw==} + engines: {node: '>=8'} + hasBin: true + dependencies: + '@jridgewell/gen-mapping': 0.3.3 + commander: 4.1.1 + glob: 7.1.6 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.6 + ts-interface-checker: 0.1.13 + dev: true + + /thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + dependencies: + thenify: 3.3.1 + dev: true + + /thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + dependencies: + any-promise: 1.3.0 + dev: true + + /to-regex-range@5.0.1: + resolution: 
{integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + dependencies: + is-number: 7.0.0 + dev: true + + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false + + /tr46@1.0.1: + resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + dependencies: + punycode: 2.3.0 + dev: true + + /tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + dev: true + + /ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + dev: true + + /tsup@7.2.0(typescript@5.1.6): + resolution: {integrity: sha512-vDHlczXbgUvY3rWvqFEbSqmC1L7woozbzngMqTtL2PGBODTtWlRwGDDawhvWzr5c1QjKe4OAKqJGfE1xeXUvtQ==} + engines: {node: '>=16.14'} + hasBin: true + peerDependencies: + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.1.0' + peerDependenciesMeta: + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + dependencies: + bundle-require: 4.0.2(esbuild@0.18.20) + cac: 6.7.14 + chokidar: 3.5.3 + debug: 4.3.4 + esbuild: 0.18.20 + execa: 5.1.1 + globby: 11.1.0 + joycon: 3.1.1 + postcss-load-config: 4.0.1 + resolve-from: 5.0.0 + rollup: 3.29.4 + source-map: 0.8.0-beta.0 + sucrase: 3.34.0 + tree-kill: 1.2.2 + typescript: 5.1.6 + transitivePeerDependencies: + - supports-color + - ts-node + dev: true + + /tsx@3.12.7: + resolution: {integrity: sha512-C2Ip+jPmqKd1GWVQDvz/Eyc6QJbGfE7NrR3fx5BpEHMZsEHoIxHL1j+lKdGobr8ovEyqeNkPLSKp6SCSOt7gmw==} + hasBin: true + dependencies: + '@esbuild-kit/cjs-loader': 2.4.4 + '@esbuild-kit/core-utils': 3.3.2 + '@esbuild-kit/esm-loader': 2.6.5 + optionalDependencies: + fsevents: 2.3.3 + dev: true + + /typescript@5.1.6: + resolution: {integrity: sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==} + engines: {node: '>=14.17'} + hasBin: true + dev: true + + /undici@5.26.2: + resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} + engines: {node: '>=14.0'} + dependencies: + '@fastify/busboy': 2.0.0 + dev: false + + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + dev: false + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false + + /webidl-conversions@4.0.2: + resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} + dev: true + + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: false + + /whatwg-url@7.1.0: + resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} + dependencies: + lodash.sortby: 4.7.0 + tr46: 1.0.1 + webidl-conversions: 4.0.2 + dev: true + + /which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: 
{node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + dev: true + + /ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: false + + /xtend@4.0.2: + resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} + engines: {node: '>=0.4'} + dev: false + + /yaml@2.3.3: + resolution: {integrity: sha512-zw0VAJxgeZ6+++/su5AFoqBbZbrEakwu+X0M5HmcwUiBL7AzcuPKjj5we4xfQLp78LkEMpD0cOnUhmgOVy3KdQ==} + engines: {node: '>= 14'} + dev: true diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml new file mode 100644 index 000000000000..c7b49e07b95f --- /dev/null +++ b/query-engine/driver-adapters/pnpm-workspace.yaml @@ -0,0 +1,4 @@ +packages: + - "./connector-test-kit-executor" + - "./driver-adapter-utils" + - "./adapter-*" From 269d2dc9e7456a12d94de80df10787077d2dd9c3 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 17:06:56 +0200 Subject: [PATCH 06/26] Makefile to setup driver adapters from a checkout of prisma --- Makefile | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 0c3e1541e632..4bab5ebb82ad 100644 --- a/Makefile +++ b/Makefile @@ -262,8 +262,32 @@ dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: - cd query-engine/driver-adapters/js && pnpm i && pnpm build +build-connector-kit-js: symlink-driver-adapters + cd query-engine/driver-adapters && pnpm i && pnpm build + +symlink-driver-adapters: ensure-prisma-present + @echo "Creating symbolic links for driver adapters..." + @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ + if [ -d "$$dir" ]; then \ + dir_name=$$(basename "$$dir"); \ + ln -sfh "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ + echo "Created symbolic link for $$dir_name"; \ + fi; \ + done; + @ln -sf "../prisma/tsconfig.build.adapter.json" "./tsconfig.build.adapter.json"; \ + echo "Symbolic links creation completed."; + +.PHONY: ensure-prisma-present +ensure-prisma-present: + @if [ -d ../prisma ]; then \ + cd "$(realpath ../prisma)" && git fetch origin main; \ + LOCAL_CHANGES=$$(git diff --name-only HEAD origin/main -- 'packages/*adapter*'); \ + if [ -n "$$LOCAL_CHANGES" ]; then \ + echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ + fi \ + else \ + git clone https://github.com/prisma/prisma.git "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. 
validate: From 1490e5b43cfed50f8ac3d90b8c868bea48c86e7d Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 17:10:25 +0200 Subject: [PATCH 07/26] Only clone prisma/prisma shallowly --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 4bab5ebb82ad..9e6cfc2540ad 100644 --- a/Makefile +++ b/Makefile @@ -286,7 +286,7 @@ ensure-prisma-present: echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ fi \ else \ - git clone https://github.com/prisma/prisma.git "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + git clone --depth=1 https://github.com/prisma/prisma.git "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. From 254f5b4f5636ffccaba53d881c361491796cc969 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Tue, 24 Oct 2023 17:12:55 +0200 Subject: [PATCH 08/26] Delete driver-adapter-smoke-tests.yml --- .../workflows/driver-adapter-smoke-tests.yml | 131 ------------------ 1 file changed, 131 deletions(-) delete mode 100644 .github/workflows/driver-adapter-smoke-tests.yml diff --git a/.github/workflows/driver-adapter-smoke-tests.yml b/.github/workflows/driver-adapter-smoke-tests.yml deleted file mode 100644 index 802e3188dedc..000000000000 --- a/.github/workflows/driver-adapter-smoke-tests.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: Driver Adapters, Smoke Tests -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/driver-adapter-smoke-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - driver-adapter-smoke-tests: - name: ${{ matrix.adapter }} - - strategy: - fail-fast: false - matrix: - adapter: ['neon:ws', 'neon:http', planetscale, pg, libsql] - - runs-on: ubuntu-latest - - services: - postgres: - image: postgres - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - # via package.json rewritten into DATABASE_URL before scripts are run - env: - JS_NEON_DATABASE_URL: ${{ secrets.JS_NEON_DATABASE_URL }} - JS_PLANETSCALE_DATABASE_URL: ${{ secrets.JS_PLANETSCALE_DATABASE_URL }} - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test # ${{ secrets.JS_PG_DATABASE_URL }} - # TODO: test sqld and embedded replicas - JS_LIBSQL_DATABASE_URL: file:/tmp/libsql.db - # TODO: test all three of ("number", "bigint", "string") and conditionally skip some tests as appropriate - JS_LIBSQL_INT_MODE: bigint - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - run: pnpm prisma:${{ matrix.adapter }} - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - run: pnpm ${{ matrix.adapter }}:libquery - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - name: pnpm ${{ matrix.adapter }}:client (using @prisma/client - including engine! 
- from Npm) - run: pnpm ${{ matrix.adapter }}:client - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js - - - driver-adapter-smoke-tests-errors: - name: Errors - - runs-on: ubuntu-latest - - # services: - # postgres: - # image: postgres - # env: - # POSTGRES_PASSWORD: postgres - # options: >- - # --health-cmd pg_isready - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 5432:5432 - - env: - # via package.json rewritten into DATABASE_URL before scripts are run - JS_PG_DATABASE_URL: postgres://postgres:postgres@localhost:5432/test - - steps: - - uses: actions/checkout@v4 - - - uses: dtolnay/rust-toolchain@stable - - - uses: pnpm/action-setup@v2 - with: - version: 8 - - uses: actions/setup-node@v3 - with: - node-version: 18 - #cache: 'pnpm' - - - name: Compile Query Engine - run: cargo build -p query-engine-node-api - - - name: Install Dependencies (Driver Adapters) - run: pnpm install - working-directory: ./query-engine/driver-adapters/js - - name: Build Driver Adapters - run: pnpm build - working-directory: ./query-engine/driver-adapters/js - - - name: pnpm errors - run: pnpm errors - if: always() - working-directory: ./query-engine/driver-adapters/js/smoke-test-js From 03913caed3d73fcce8dfc312af8733ae9f0f4dd2 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 12:33:50 +0200 Subject: [PATCH 09/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration see if this works --- .github/workflows/query-engine-driver-adapters.yml | 8 ++++++++ Makefile | 3 ++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index d52b446b12fb..bc38f72572f7 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -72,6 +72,14 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Extract Branch Name + id: extract-branch + run: echo "::set-output name=branch::$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + + - name: Set Environment Variable + if: steps.extract-branch.outputs.branch != '' + run: echo "DRIVER_ADAPTERS_BRANCH=${{ steps.extract-branch.outputs.branch }}" >> $GITHUB_ENV + - run: make ${{ matrix.adapter.setup_task }} - uses: dtolnay/rust-toolchain@stable diff --git a/Makefile b/Makefile index 9e6cfc2540ad..05ed8eee7c14 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,7 @@ CONFIG_PATH = ./query-engine/connector-test-kit-rs/test-configs CONFIG_FILE = .test_config SCHEMA_EXAMPLES_PATH = ./query-engine/example_schemas DEV_SCHEMA_FILE = dev_datamodel.prisma +DRIVER_ADAPTERS_BRANCH ?= main LIBRARY_EXT := $(shell \ case "$$(uname -s)" in \ @@ -286,7 +287,7 @@ ensure-prisma-present: echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ fi \ else \ - git clone --depth=1 https://github.com/prisma/prisma.git "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ + git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ fi; # Quick schema validation of whatever you have in the dev_datamodel.prisma file. 
From e0ea5289062704efb97f05efd2517c5dfb85aaf8 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 13:22:42 +0200 Subject: [PATCH 10/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration change deprecated set-output command --- .github/workflows/query-engine-driver-adapters.yml | 2 +- Makefile | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index bc38f72572f7..8abb518496c9 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -74,7 +74,7 @@ jobs: - name: Extract Branch Name id: extract-branch - run: echo "::set-output name=branch::$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + run: echo branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" >> $GITHUB_OUTPUT - name: Set Environment Variable if: steps.extract-branch.outputs.branch != '' diff --git a/Makefile b/Makefile index 05ed8eee7c14..dfbd75355760 100644 --- a/Makefile +++ b/Makefile @@ -287,6 +287,7 @@ ensure-prisma-present: echo "⚠️ ../prisma diverges from prisma/prisma main branch. Test results might diverge from those in CI ⚠️ "; \ fi \ else \ + echo "git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) ../prisma"; \ git clone --depth=1 https://github.com/prisma/prisma.git --branch=$(DRIVER_ADAPTERS_BRANCH) "../prisma" && echo "Prisma repository has been cloned to ../prisma"; \ fi; From 463775a8b467ff7aa0d3e21063f1e617014f4b81 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 13:30:33 +0200 Subject: [PATCH 11/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove --- .github/workflows/benchmark.yml | 42 ---- .github/workflows/build-apple-intel.yml | 49 ---- .github/workflows/build-apple-silicon.yml | 51 ---- .github/workflows/build-wasm.yml | 25 -- .github/workflows/build-windows.yml | 44 ---- .github/workflows/compilation.yml | 40 --- .github/workflows/formatting.yml | 46 ---- .github/workflows/on-push-to-main.yml | 29 --- .github/workflows/publish-driver-adapters.yml | 83 ------ .../workflows/publish-prisma-schema-wasm.yml | 70 ------ .github/workflows/quaint.yml | 58 ----- .github/workflows/query-engine-black-box.yml | 68 ----- .../query-engine-driver-adapters.yml | 6 - .github/workflows/query-engine.yml | 100 -------- .github/workflows/schema-engine.yml | 238 ------------------ .github/workflows/send-tag-event.yml | 24 -- .github/workflows/unit-tests.yml | 45 ---- 17 files changed, 1018 deletions(-) delete mode 100644 .github/workflows/benchmark.yml delete mode 100644 .github/workflows/build-apple-intel.yml delete mode 100644 .github/workflows/build-apple-silicon.yml delete mode 100644 .github/workflows/build-wasm.yml delete mode 100644 .github/workflows/build-windows.yml delete mode 100644 .github/workflows/compilation.yml delete mode 100644 .github/workflows/formatting.yml delete mode 100644 .github/workflows/on-push-to-main.yml delete mode 100644 .github/workflows/publish-driver-adapters.yml delete mode 100644 .github/workflows/publish-prisma-schema-wasm.yml delete mode 100644 .github/workflows/quaint.yml delete mode 100644 .github/workflows/query-engine-black-box.yml delete mode 100644 .github/workflows/query-engine.yml delete mode 100644 .github/workflows/schema-engine.yml delete mode 100644 .github/workflows/send-tag-event.yml delete mode 100644 
.github/workflows/unit-tests.yml diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml deleted file mode 100644 index 4dbfa4855fc9..000000000000 --- a/.github/workflows/benchmark.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Benchmark -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/benchmark.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - benchmark: - name: 'Run benchmarks on Linux' - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - name: Install cargo-codspeed - run: cargo install cargo-codspeed - - - name: 'Build the benchmark targets: schema' - run: cargo codspeed build -p schema - - - name: 'Build the benchmark targets: request-handlers' - run: cargo codspeed build -p request-handlers - - - name: Run the benchmarks - uses: CodSpeedHQ/action@v1 - with: - run: cargo codspeed run - token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/build-apple-intel.yml b/.github/workflows/build-apple-intel.yml deleted file mode 100644 index 994cbfbb0ad0..000000000000 --- a/.github/workflows/build-apple-intel.yml +++ /dev/null @@ -1,49 +0,0 @@ -on: - workflow_dispatch: - inputs: - commit: - description: "Commit on the given branch to build" - required: false - -jobs: - build: - # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit - name: "MacOS Intel engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" - env: - SQLITE_MAX_VARIABLE_NUMBER: 250000 - SQLITE_MAX_EXPR_DEPTH: 10000 - - # minimum supported version of macOS - MACOSX_DEPLOYMENT_TARGET: 10.15 - runs-on: macos-13 - - steps: - - name: Output link to real commit - run: echo ${{ github.repository }}/commit/${{ github.event.inputs.commit }} - - - name: Checkout ${{ github.event.inputs.commit }} - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.commit }} - - - uses: dtolnay/rust-toolchain@stable - - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-intel-cargo-${{ hashFiles('**/Cargo.lock') }} - - - run: | - cargo build --release -p query-engine -p query-engine-node-api -p schema-engine-cli -p prisma-fmt - - - uses: actions/upload-artifact@v3 - with: - name: binaries - path: | - ${{ github.workspace }}/target/release/schema-engine - ${{ github.workspace }}/target/release/prisma-fmt - ${{ github.workspace }}/target/release/query-engine - ${{ github.workspace }}/target/release/libquery_engine.dylib diff --git a/.github/workflows/build-apple-silicon.yml b/.github/workflows/build-apple-silicon.yml deleted file mode 100644 index 74c49c5154fa..000000000000 --- a/.github/workflows/build-apple-silicon.yml +++ /dev/null @@ -1,51 +0,0 @@ -on: - workflow_dispatch: - inputs: - commit: - description: "Commit on the given branch to build" - required: false - -jobs: - build: - # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit - name: "MacOS ARM64 (Apple Silicon) engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" - env: - SQLITE_MAX_VARIABLE_NUMBER: 250000 - SQLITE_MAX_EXPR_DEPTH: 10000 - runs-on: macos-13 - - steps: - - name: Output link to real commit - run: echo ${{ github.repository }}/commit/${{ 
github.event.inputs.commit }} - - - name: Checkout ${{ github.event.inputs.commit }} - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.commit }} - - - uses: dtolnay/rust-toolchain@stable - - - name: Install aarch64 toolchain - run: rustup target add aarch64-apple-darwin - - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - - run: xcodebuild -showsdks - - - run: | - cargo build --target=aarch64-apple-darwin --release -p query-engine -p query-engine-node-api -p schema-engine-cli -p prisma-fmt - - - uses: actions/upload-artifact@v3 - with: - name: binaries - path: | - ${{ github.workspace }}/target/aarch64-apple-darwin/release/schema-engine - ${{ github.workspace }}/target/aarch64-apple-darwin/release/prisma-fmt - ${{ github.workspace }}/target/aarch64-apple-darwin/release/query-engine - ${{ github.workspace }}/target/aarch64-apple-darwin/release/libquery_engine.dylib diff --git a/.github/workflows/build-wasm.yml b/.github/workflows/build-wasm.yml deleted file mode 100644 index 7969cd2dd462..000000000000 --- a/.github/workflows/build-wasm.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: WASM build -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/build-wasm.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -jobs: - build: - name: 'prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v23 - - - run: nix build .#prisma-schema-wasm - - run: nix flake check diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml deleted file mode 100644 index 1dcd11f092ee..000000000000 --- a/.github/workflows/build-windows.yml +++ /dev/null @@ -1,44 +0,0 @@ -on: - workflow_dispatch: - inputs: - commit: - description: "Commit on the given branch to build" - required: true - -jobs: - build: - # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit - name: "Windows engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" - env: - SQLITE_MAX_VARIABLE_NUMBER: 250000 - SQLITE_MAX_EXPR_DEPTH: 10000 - RUSTFLAGS: "-C target-feature=+crt-static" - runs-on: windows-latest - - steps: - - name: Output link to real commit - run: echo ${{ github.repository }}/commit/${{ github.event.inputs.commit }} - - - name: Checkout ${{ github.event.inputs.commit }} - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.commit }} - - - uses: dtolnay/rust-toolchain@stable - - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - - run: cargo build --release - - - uses: actions/upload-artifact@v3 - with: - name: binaries - path: | - ${{ github.workspace }}/target/release/*.exe - ${{ github.workspace }}/target/release/*.dll diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml deleted file mode 100644 index d9f81f47772b..000000000000 --- a/.github/workflows/compilation.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: 'Release binary compilation test' -on: - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/compilation.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ 
github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test-crate-compilation: - name: 'Compile top level crates on Linux' - strategy: - fail-fast: false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - run: 'cargo clean && cargo build --release -p schema-engine-cli' - name: 'Compile Migration Engine' - - - run: 'cargo clean && cargo build --release -p prisma-fmt' - name: 'Compile prisma-fmt' - - - run: 'cargo clean && cargo build --release -p query-engine' - name: 'Compile Query Engine Binary' - - - run: 'cargo clean && cargo build --release -p query-engine-node-api' - name: 'Compile Query Engine Library' - - - name: 'Check that Cargo.lock did not change' - run: 'git diff --exit-code' diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml deleted file mode 100644 index 50b635544b91..000000000000 --- a/.github/workflows/formatting.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: Formatting -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/formatting.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - clippy: - runs-on: ubuntu-latest - env: - RUSTFLAGS: '-Dwarnings' - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - with: - components: clippy - - run: cargo clippy --all-features - - format: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - with: - components: rustfmt - - name: Check formatting - run: cargo fmt -- --check - shellcheck: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Run ShellCheck - uses: ludeeus/action-shellcheck@master diff --git a/.github/workflows/on-push-to-main.yml b/.github/workflows/on-push-to-main.yml deleted file mode 100644 index c9c153b1f12d..000000000000 --- a/.github/workflows/on-push-to-main.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Generate cargo docs & engines size -on: - workflow_dispatch: - push: - branches: - - main - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - publish-to-gh-pages: - runs-on: ubuntu-latest - strategy: - fail-fast: true - - steps: - - uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v23 - - run: | - git config user.email "prismabots@gmail.com" - git config user.name "prisma-bot" - - - name: Generate cargo docs for the workspace to gh-pages branch - run: nix run .#publish-cargo-docs - - - name: Publish engines size to gh-pages branch - run: nix run .#publish-engine-size diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - 
description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . -name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml deleted file mode 100644 index f453811009ce..000000000000 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Build and publish @prisma/prisma-schema-wasm - -concurrency: build-prisma-schema-wasm - -on: - # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo - workflow_dispatch: - inputs: - enginesWrapperVersion: - required: true - enginesHash: - required: true - npmDistTag: - required: true - default: 'latest' - -jobs: - build: - name: Build and publish @prisma/prisma-schema-wasm - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - uses: cachix/install-nix-action@v23 - - # - # Build - # - - - run: nix build .#prisma-schema-wasm - - # - # Publish - # - - - uses: actions/setup-node@v3 - with: - node-version: '14.x' - - - name: Set up NPM token - run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - - run: | - PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) - npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - - 
# - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma-schema-wasm publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml deleted file mode 100644 index 7b49e80a7bd0..000000000000 --- a/.github/workflows/quaint.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Quaint -on: - push: - branches: - - main - pull_request: - paths: - - 'quaint/**' - -jobs: - tests: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - features: - - "--lib --features=all" - - "--lib --no-default-features --features=sqlite" - - "--lib --no-default-features --features=sqlite --features=pooled" - - "--lib --no-default-features --features=postgresql" - - "--lib --no-default-features --features=postgresql --features=pooled" - - "--lib --no-default-features --features=mysql" - - "--lib --no-default-features --features=mysql --features=pooled" - - "--lib --no-default-features --features=mssql" - - "--lib --no-default-features --features=mssql --features=pooled" - env: - TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" - TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma" - TEST_MYSQL_MARIADB: "mysql://root:prisma@localhost:3308/prisma" - TEST_PSQL: "postgres://postgres:prisma@localhost:5432/postgres" - TEST_MSSQL: "jdbc:sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true" - TEST_CRDB: "postgresql://prisma@127.0.0.1:26259/postgres" - - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ matrix.features }} - - - name: Start Databases - run: docker-compose -f docker-compose.yml up -d - working-directory: ./quaint - - - name: Sleep for 20s - uses: juliangruber/sleep-action@v1 - with: - time: 20s - - - name: Run tests - run: cargo test ${{ matrix.features }} - working-directory: ./quaint diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml deleted file mode 100644 index 78e60178d7f7..000000000000 --- a/.github/workflows/query-engine-black-box.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Query Engine Black Box -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/query-engine-black-box.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - rust-tests: - name: 'Test query-engine as a black-box' - - strategy: - fail-fast: false - matrix: - database: - - name: 'postgres15' - single_threaded: false - connector: 'postgres' - version: '15' - - env: - LOG_LEVEL: 'info' - LOG_QUERIES: 'y' - RUST_LOG_FORMAT: 'devel' - RUST_BACKTRACE: '1' - CLICOLOR_FORCE: '1' - CLOSED_TX_CLEANUP: '2' - SIMPLE_TEST_MODE: '1' - QUERY_BATCH_SIZE: '10' - TEST_RUNNER: 'direct' - TEST_CONNECTOR: ${{ matrix.database.connector }} - TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Login to 
Docker Hub - uses: docker/login-action@v3 - continue-on-error: true - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' - run: make start-${{ matrix.database.name }} - - - uses: dtolnay/rust-toolchain@stable - - - run: export WORKSPACE_ROOT=$(pwd) && cargo build --package query-engine - env: - CLICOLOR_FORCE: 1 - - - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package black-box-tests -- --test-threads=1 - env: - CLICOLOR_FORCE: 1 diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 8abb518496c9..f37b6ff2e33e 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -25,14 +25,8 @@ jobs: fail-fast: false matrix: adapter: - - name: 'pg' - setup_task: 'dev-pg-postgres13' - name: 'neon:ws' setup_task: 'dev-neon-ws-postgres13' - - name: 'libsql' - setup_task: 'dev-libsql-sqlite' - - name: 'planetscale' - setup_task: 'dev-planetscale-vitess8' node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml deleted file mode 100644 index 9c242217662d..000000000000 --- a/.github/workflows/query-engine.yml +++ /dev/null @@ -1,100 +0,0 @@ -name: Query Engine -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/query-engine.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - rust-query-engine-tests: - name: 'Test ${{ matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux' - - strategy: - fail-fast: false - matrix: - database: - - name: 'vitess_5_7' - single_threaded: true - connector: 'vitess' - version: '5.7' - - name: 'vitess_8_0' - single_threaded: true - connector: 'vitess' - version: '8.0' - - name: 'postgres15' - single_threaded: true - connector: 'postgres' - version: '15' - - name: 'mssql_2022' - single_threaded: false - connector: 'sqlserver' - version: '2022' - - name: 'mongodb_4_2' - single_threaded: true - connector: 'mongodb' - version: '4.2' - - name: 'cockroach_23_1' - single_threaded: false - connector: 'cockroachdb' - version: '23.1' - - name: 'cockroach_22_2' - single_threaded: false - connector: 'cockroachdb' - version: '22.2' - - name: 'cockroach_22_1_0' - single_threaded: false - connector: 'cockroachdb' - version: '22.1' - engine_protocol: [graphql, json] - - env: - LOG_LEVEL: 'info' - LOG_QUERIES: 'y' - RUST_LOG_FORMAT: 'devel' - RUST_BACKTRACE: '1' - CLICOLOR_FORCE: '1' - CLOSED_TX_CLEANUP: '2' - SIMPLE_TEST_MODE: '1' - QUERY_BATCH_SIZE: '10' - TEST_RUNNER: 'direct' - TEST_CONNECTOR: ${{ matrix.database.connector }} - TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} - PRISMA_ENGINE_PROTOCOL: ${{ matrix.engine_protocol }} - - runs-on: buildjet-16vcpu-ubuntu-2004 - steps: - - uses: actions/checkout@v4 - - - name: Login to Docker Hub - uses: docker/login-action@v3 - continue-on-error: true - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' - run: make start-${{ matrix.database.name }} - - - uses: dtolnay/rust-toolchain@stable - - - run: 
export WORKSPACE_ROOT=$(pwd) && cargo test --package query-engine-tests -- --test-threads=1 - if: ${{ matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - - - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package query-engine-tests -- --test-threads=8 - if: ${{ !matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml deleted file mode 100644 index 5bdf25a2bd35..000000000000 --- a/.github/workflows/schema-engine.yml +++ /dev/null @@ -1,238 +0,0 @@ -name: Schema Engine -on: - push: - branches: - - main - pull_request: - paths-ignore: - # Generic - - '.github/**' - - '!.github/workflows/schema-engine.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - # Specific - - 'query-engine/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test-mongodb-schema-connector: - name: 'Test ${{ matrix.database.name }} on Linux' - strategy: - fail-fast: false - matrix: - database: - - name: 'mongodb42' - url: 'mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true' - - name: 'mongodb44' - url: 'mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true' - - name: 'mongodb5' - url: 'mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - name: Login to Docker Hub - uses: docker/login-action@v3 - continue-on-error: true - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: 'Start ${{ matrix.database.name }}' - run: make start-${{ matrix.database.name }}-single - - - run: cargo test -p mongodb-schema-connector - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - test-linux: - name: 'Test ${{ matrix.database.name }} on Linux' - - strategy: - fail-fast: false - matrix: - database: - - name: mssql_2017 - url: 'sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' - - name: mssql_2019 - url: 'sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' - - name: mysql_5_6 - url: 'mysql://root:prisma@localhost:3309' - - name: mysql_5_7 - url: 'mysql://root:prisma@localhost:3306' - - name: mysql_8 - url: 'mysql://root:prisma@localhost:3307' - - name: mysql_mariadb - url: 'mysql://root:prisma@localhost:3308' - - name: postgres9 - url: 'postgresql://postgres:prisma@localhost:5431' - - name: postgres10 - url: 'postgresql://postgres:prisma@localhost:5432' - - name: postgres11 - url: 'postgresql://postgres:prisma@localhost:5433' - - name: postgres12 - url: 'postgresql://postgres:prisma@localhost:5434' - - name: postgres13 - url: 'postgresql://postgres:prisma@localhost:5435' - - name: postgres14 - url: 'postgresql://postgres:prisma@localhost:5437' - - name: postgres15 - url: 'postgresql://postgres:prisma@localhost:5438' - - name: cockroach_23_1 - url: 'postgresql://prisma@localhost:26260' - - name: cockroach_22_2 - url: 'postgresql://prisma@localhost:26259' - - name: cockroach_22_1_0 - url: 'postgresql://prisma@localhost:26257' - - name: sqlite - url: sqlite - - name: vitess_5_7 - url: 'mysql://root:prisma@localhost:33577/test' - shadow_database_url: 'mysql://root:prisma@localhost:33578/shadow' - is_vitess: true - 
single_threaded: true - - name: vitess_8_0 - url: 'mysql://root:prisma@localhost:33807/test' - shadow_database_url: 'mysql://root:prisma@localhost:33808/shadow' - is_vitess: true - single_threaded: true - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - name: Login to Docker Hub - uses: docker/login-action@v3 - continue-on-error: true - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: 'Start ${{ matrix.database.name }}' - run: make start-${{ matrix.database.name }} - - - run: cargo test -p sql-introspection-tests - if: ${{ !matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - - run: cargo test -p sql-schema-describer - if: ${{ !matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - - run: cargo test -p sql-migration-tests - if: ${{ !matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - RUST_LOG: debug - - - run: cargo test -p schema-engine-cli - if: ${{ !matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - - run: cargo test -p sql-introspection-tests -- --test-threads=1 - if: ${{ matrix.database.is_vitess }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} - - - run: cargo test -p sql-migration-tests -- --test-threads=1 - if: ${{ matrix.database.is_vitess }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} - RUST_LOG: debug - - - run: cargo test -p schema-engine-cli -- --test-threads=1 - if: ${{ matrix.database.is_vitess }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} - - - run: cargo test -p sql-schema-describer -- --test-threads=1 - if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - - run: cargo test -p sql-introspection-tests -- --test-threads=1 - if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - - run: cargo test -p sql-migration-tests -- --test-threads=1 - if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - RUST_LOG: debug - - - run: cargo test -p schema-engine-cli -- --test-threads=1 - if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} - env: - CLICOLOR_FORCE: 1 - TEST_DATABASE_URL: ${{ matrix.database.url }} - - test-windows: - strategy: - fail-fast: false - matrix: - db: - - name: mysql - url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' - - name: mariadb - url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' - rust: - - stable - os: - - windows-latest - - runs-on: ${{ matrix.os }} - - name: 'Test ${{ matrix.db.name }} on Windows' - - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-${{ matrix.rust }}-${{ hashFiles('**/Cargo.lock') }} - - 
- name: Install ${{ matrix.db.name }} - run: | - iwr -useb 'https://raw.githubusercontent.com/scoopinstaller/install/master/install.ps1' -outfile 'install.ps1' - .\install.ps1 -RunAsAdmin - - scoop install sudo - scoop install ${{ matrix.db.name }} - sudo mysqld --install - sudo sc start MySQL - - - name: Run tests - run: cargo test -p sql-migration-tests - env: - TEST_DATABASE_URL: ${{ matrix.db.url }} diff --git a/.github/workflows/send-tag-event.yml b/.github/workflows/send-tag-event.yml deleted file mode 100644 index 2088e258ed49..000000000000 --- a/.github/workflows/send-tag-event.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Send Tag Event - -on: - push: - tags: - - '**' - -jobs: - send-tag-event: - runs-on: ubuntu-22.04 - steps: - - run: echo "Sending event for tag ${{ github.ref_name }}" - - uses: actions/github-script@v6 - with: - github-token: ${{ secrets.ACCELERATE_DOCKER_QE_BUILD }} - script: | - await github.rest.repos.createDispatchEvent({ - owner: 'prisma', - repo: 'pdp-cloudflare', - event_type: "engines_release", - client_payload: { - prisma_version: '${{ github.ref_name }}', - }, - }) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml deleted file mode 100644 index b852499205e9..000000000000 --- a/.github/workflows/unit-tests.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Unit tests -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/unit-tests.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test: - name: Workspace unit tests - - strategy: - fail-fast: false - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - - run: | - cargo test --workspace \ - --exclude=quaint \ - --exclude=query-engine \ - --exclude=query-engine-node-api \ - --exclude=black-box-tests \ - --exclude=query-engine-tests \ - --exclude=sql-migration-tests \ - --exclude=schema-engine-cli \ - --exclude=sql-schema-describer \ - --exclude=sql-introspection-tests \ - --exclude=mongodb-schema-connector - env: - CLICOLOR_FORCE: 1 From 418c50d3af13e244170d34656c762dec7f01b2c7 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 13:49:45 +0200 Subject: [PATCH 12/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration clearer detection of branch --- .github/workflows/query-engine-driver-adapters.yml | 13 ++++++++----- Makefile | 12 ++++++++---- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index f37b6ff2e33e..79279d617ec4 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -43,6 +43,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: 'Setup Node.js' uses: actions/setup-node@v3 @@ -68,11 +70,12 @@ jobs: - name: Extract Branch Name id: extract-branch - run: echo branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" >> $GITHUB_OUTPUT - - - name: Set Environment Variable - if: steps.extract-branch.outputs.branch != '' - run: echo "DRIVER_ADAPTERS_BRANCH=${{ steps.extract-branch.outputs.branch }}" >> $GITHUB_ENV + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n $branch 
]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi - run: make ${{ matrix.adapter.setup_task }} diff --git a/Makefile b/Makefile index dfbd75355760..9ada12f6465f 100644 --- a/Makefile +++ b/Makefile @@ -263,22 +263,26 @@ dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi: cargo build --package query-engine-node-api -build-connector-kit-js: symlink-driver-adapters +build-connector-kit-js: build-driver-adapters symlink-driver-adapters cd query-engine/driver-adapters && pnpm i && pnpm build +build-driver-adapters: ensure-prisma-present + @echo "Building driver adapters..." + @cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build + @echo "Driver adapters build completed."; + symlink-driver-adapters: ensure-prisma-present @echo "Creating symbolic links for driver adapters..." @for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \ if [ -d "$$dir" ]; then \ dir_name=$$(basename "$$dir"); \ - ln -sfh "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ + ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \ echo "Created symbolic link for $$dir_name"; \ fi; \ done; - @ln -sf "../prisma/tsconfig.build.adapter.json" "./tsconfig.build.adapter.json"; \ + @ln -sfn "../prisma/tsconfig.build.adapter.json" "./tsconfig.build.adapter.json"; \ echo "Symbolic links creation completed."; -.PHONY: ensure-prisma-present ensure-prisma-present: @if [ -d ../prisma ]; then \ cd "$(realpath ../prisma)" && git fetch origin main; \ From 67a89bd4772786deb79d8c7b148c0ccb66bb5118 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 15:08:26 +0200 Subject: [PATCH 13/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Build executor separately --- Makefile | 2 +- .../connector-test-kit-executor/package.json | 14 ++- .../pnpm-lock.yaml | 100 +++++++++--------- query-engine/driver-adapters/package.json | 23 ---- .../driver-adapters/pnpm-workspace.yaml | 4 - 5 files changed, 61 insertions(+), 82 deletions(-) rename query-engine/driver-adapters/{ => connector-test-kit-executor}/pnpm-lock.yaml (97%) delete mode 100644 query-engine/driver-adapters/package.json delete mode 100644 query-engine/driver-adapters/pnpm-workspace.yaml diff --git a/Makefile b/Makefile index 9ada12f6465f..e51304cb9f12 100644 --- a/Makefile +++ b/Makefile @@ -264,7 +264,7 @@ build-qe-napi: cargo build --package query-engine-node-api build-connector-kit-js: build-driver-adapters symlink-driver-adapters - cd query-engine/driver-adapters && pnpm i && pnpm build + cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build build-driver-adapters: ensure-prisma-present @echo "Building driver adapters..." 
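Taken together, the Makefile changes above wire the connector-kit JS build so that the driver adapters are built from a sibling prisma/prisma checkout, symlinked into this repository, and then the test-kit executor is built against them. A rough shell equivalent of what `make build-connector-kit-js` now does (a sketch, not the real recipe; it assumes a prisma/prisma checkout next to prisma-engines, which the `ensure-prisma-present` target enforces):

```shell
# Sketch of the build-connector-kit-js flow after these Makefile changes.

# 1. build-driver-adapters: build the adapter packages from the prisma/prisma checkout
(cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build)

# 2. symlink-driver-adapters: link each built adapter package into this repo
for dir in ../prisma/packages/*adapter*; do
  ln -sfn "$(realpath "$dir")" "query-engine/driver-adapters/$(basename "$dir")"
done
ln -sfn "../prisma/tsconfig.build.adapter.json" "./tsconfig.build.adapter.json"

# 3. install and build the connector-test-kit executor itself
(cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build)
```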
diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index 3fb965afd916..b4da6f9f0b24 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -1,6 +1,10 @@ { + "engines": { + "node": ">=16.13", + "pnpm": ">=8.6.6 <9" + }, "name": "connector-test-kit-executor", - "version": "5.4.0", + "version": "0.0.1", "description": "", "main": "dist/index.js", "private": true, @@ -24,5 +28,11 @@ "@types/pg": "^8.10.2", "pg": "^8.11.3", "undici": "^5.26.2" + }, + "devDependencies": { + "@types/node": "^20.5.1", + "tsup": "^7.2.0", + "tsx": "^3.12.7", + "typescript": "^5.1.6" } -} +} \ No newline at end of file diff --git a/query-engine/driver-adapters/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml similarity index 97% rename from query-engine/driver-adapters/pnpm-lock.yaml rename to query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index a829827c2a8c..4ec9635190b0 100644 --- a/query-engine/driver-adapters/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -4,58 +4,54 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -importers: - - .: - devDependencies: - '@types/node': - specifier: ^20.5.1 - version: 20.5.1 - tsup: - specifier: ^7.2.0 - version: 7.2.0(typescript@5.1.6) - tsx: - specifier: ^3.12.7 - version: 3.12.7 - typescript: - specifier: ^5.1.6 - version: 5.1.6 - - connector-test-kit-executor: - dependencies: - '@libsql/client': - specifier: 0.3.5 - version: 0.3.5 - '@neondatabase/serverless': - specifier: ^0.6.0 - version: 0.6.0 - '@planetscale/database': - specifier: 1.11.0 - version: 1.11.0 - '@prisma/adapter-libsql': - specifier: ../adapter-libsql - version: link:../adapter-libsql - '@prisma/adapter-neon': - specifier: ../adapter-neon - version: link:../adapter-neon - '@prisma/adapter-pg': - specifier: ../adapter-pg - version: link:../adapter-pg - '@prisma/adapter-planetscale': - specifier: ../adapter-planetscale - version: link:../adapter-planetscale - '@prisma/driver-adapter-utils': - specifier: ../driver-adapter-utils - version: link:../driver-adapter-utils - '@types/pg': - specifier: ^8.10.2 - version: 8.10.2 - pg: - specifier: ^8.11.3 - version: 8.11.3 - undici: - specifier: ^5.26.2 - version: 5.26.2 +dependencies: + '@libsql/client': + specifier: 0.3.5 + version: 0.3.5 + '@neondatabase/serverless': + specifier: ^0.6.0 + version: 0.6.0 + '@planetscale/database': + specifier: 1.11.0 + version: 1.11.0 + '@prisma/adapter-libsql': + specifier: ../adapter-libsql + version: link:../adapter-libsql + '@prisma/adapter-neon': + specifier: ../adapter-neon + version: link:../adapter-neon + '@prisma/adapter-pg': + specifier: ../adapter-pg + version: link:../adapter-pg + '@prisma/adapter-planetscale': + specifier: ../adapter-planetscale + version: link:../adapter-planetscale + '@prisma/driver-adapter-utils': + specifier: ../driver-adapter-utils + version: link:../driver-adapter-utils + '@types/pg': + specifier: ^8.10.2 + version: 8.10.2 + pg: + specifier: ^8.11.3 + version: 8.11.3 + undici: + specifier: ^5.26.2 + version: 5.26.2 + +devDependencies: + '@types/node': + specifier: ^20.5.1 + version: 20.5.1 + tsup: + specifier: ^7.2.0 + version: 7.2.0(typescript@5.1.6) + tsx: + specifier: ^3.12.7 + version: 3.12.7 + typescript: + specifier: ^5.1.6 + version: 5.1.6 packages: diff 
--git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json deleted file mode 100644 index 6b8a756bc6ee..000000000000 --- a/query-engine/driver-adapters/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "private": true, - "name": "js", - "version": "0.0.2", - "description": "", - "engines": { - "node": ">=16.13", - "pnpm": ">=8.6.6 <9" - }, - "license": "Apache-2.0", - "scripts": { - "build": "pnpm -r run build", - "lint": "pnpm -r run lint" - }, - "keywords": [], - "author": "", - "devDependencies": { - "@types/node": "^20.5.1", - "tsup": "^7.2.0", - "tsx": "^3.12.7", - "typescript": "^5.1.6" - } -} \ No newline at end of file diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml deleted file mode 100644 index c7b49e07b95f..000000000000 --- a/query-engine/driver-adapters/pnpm-workspace.yaml +++ /dev/null @@ -1,4 +0,0 @@ -packages: - - "./connector-test-kit-executor" - - "./driver-adapter-utils" - - "./adapter-*" From 6d070ed424b473df72b0dc529ff05db9ea5d94f4 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 15:20:56 +0200 Subject: [PATCH 14/26] DRIVER_ADAPTERS_BRANCH=driver-adapters-migration Add make tasks to test driver adapters --- Makefile | 6 ++++++ .../driver-adapters/connector-test-kit-executor/src/qe.ts | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index e51304cb9f12..cbcda4d4a757 100644 --- a/Makefile +++ b/Makefile @@ -116,12 +116,16 @@ start-pg-postgres13: build-qe-napi build-connector-kit-js start-postgres13 dev-pg-postgres13: start-pg-postgres13 cp $(CONFIG_PATH)/pg-postgres13 $(CONFIG_FILE) +test-pg-postgres13: dev-pg-postgres13 test-qe-st + start-neon-postgres13: build-qe-napi build-connector-kit-js docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 dev-neon-ws-postgres13: start-neon-postgres13 cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE) +test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st + start-postgres14: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 @@ -256,6 +260,8 @@ start-planetscale-vitess8: build-qe-napi build-connector-kit-js dev-planetscale-vitess8: start-planetscale-vitess8 cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE) +test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st + ###################### # Local dev commands # ###################### diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts index 764df8f6108d..186d7a9e80d2 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/qe.ts @@ -10,7 +10,7 @@ export function initQueryEngine(adapter: ErrorCapturingDriverAdapter, datamodel: const libExt = os.platform() === 'darwin' ? 
'dylib' : 'so' const dirname = path.dirname(new URL(import.meta.url).pathname) - const libQueryEnginePath = path.join(dirname, `../../../../../target/debug/libquery_engine.${libExt}`) + const libQueryEnginePath = path.join(dirname, `../../../../target/debug/libquery_engine.${libExt}`) const libqueryEngine = { exports: {} as unknown as lib.Library } // @ts-ignore From 5c2601aafcd696d9c78d55e1a823dd50ebbd7ff5 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 15:40:35 +0200 Subject: [PATCH 15/26] Document and ease running driver adapter tests --- Makefile | 16 ++++++++++++++++ README.md | 16 ++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/Makefile b/Makefile index cbcda4d4a757..5c27cf0c027b 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,13 @@ release: ################# test-qe: +ifndef DRIVER_ADAPTER cargo test --package query-engine-tests +else + @echo "Executing query engine tests with $(DRIVER_ADAPTER) driver adapter"; \ + # Add your actual command for the "test-driver-adapter" task here + $(MAKE) test-driver-adapter-$(DRIVER_ADAPTER); +endif test-qe-verbose: cargo test --package query-engine-tests -- --nocapture @@ -81,6 +87,10 @@ dev-sqlite: dev-libsql-sqlite: build-qe-napi build-connector-kit-js cp $(CONFIG_PATH)/libsql-sqlite $(CONFIG_FILE) +test-libsql-sqlite: dev-libsql-sqlite test-qe-st + +test-driver-adapter-libsql: test-libsql-sqlite + start-postgres9: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 @@ -118,6 +128,8 @@ dev-pg-postgres13: start-pg-postgres13 test-pg-postgres13: dev-pg-postgres13 test-qe-st +test-driver-adapter-pg: test-pg-postgres13 + start-neon-postgres13: build-qe-napi build-connector-kit-js docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13 @@ -126,6 +138,8 @@ dev-neon-ws-postgres13: start-neon-postgres13 test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st +test-driver-adapter-neon: test-neon-ws-postgres13 + start-postgres14: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres14 @@ -262,6 +276,8 @@ dev-planetscale-vitess8: start-planetscale-vitess8 test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st +test-driver-adapter-planetscale: test-planetscale-vitess8 + ###################### # Local dev commands # ###################### diff --git a/README.md b/README.md index 6fd072072757..92da9adad511 100644 --- a/README.md +++ b/README.md @@ -203,6 +203,7 @@ integration tests. - Alternatively: Load the defined environment in `./.envrc` manually in your shell. **Setup:** + There are helper `make` commands to set up a test environment for a specific database connector you want to test. The commands set up a container (if needed) and write the `.test_config` file, which is picked up by the integration @@ -234,6 +235,21 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. +#### Testing driver adapters + +Driver adapters are a feature to run queries through javascript drivers from rust. An adapter for a certain driver is +provided by the prisma client and then use by rust code. If you want to run query engine tests through driver adapters: + +- `DRIVER_ADAPTER=$adapter make qe-test` + +Where `$adapter` is one of `pg`, `neon`, or `planetscale` or `libsql` (the driver adapters currently supported) + +This make task hides the underlying complexity of spawning the right docker containers, pulling driver adapters code +from prisma/prisma, building them, and build a test runner to use them. 
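For example, to run the whole suite against the `pg` adapter (note the target is spelled `test-qe`, matching the Makefile hunk above):

```shell
# Runs the query-engine test suite through the pg driver adapter.
# Per the Makefile above, this roughly expands to:
#   test-qe -> test-driver-adapter-pg -> test-pg-postgres13 -> dev-pg-postgres13 + test-qe-st
DRIVER_ADAPTER=pg make test-qe
```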
+ +When pulling the driver adapters code, make will ensure you have a clone of prisma/prisma in the same directory as +prisma-engines. If you don't, it will clone it for you. + ## Parallel rust-analyzer builds When rust-analzyer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer. It is possible to avoid this by setting a different build path for From 96d4cd0337c4a5f1866c9d762e9d365577f48d6f Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 15:42:18 +0200 Subject: [PATCH 16/26] Revert "DRIVER_ADAPTERS_BRANCH=driver-adapters-migration tmp remove" This reverts commit 463775a8b467ff7aa0d3e21063f1e617014f4b81. --- .github/workflows/benchmark.yml | 42 ++++ .github/workflows/build-apple-intel.yml | 49 ++++ .github/workflows/build-apple-silicon.yml | 51 ++++ .github/workflows/build-wasm.yml | 25 ++ .github/workflows/build-windows.yml | 44 ++++ .github/workflows/compilation.yml | 40 +++ .github/workflows/formatting.yml | 46 ++++ .github/workflows/on-push-to-main.yml | 29 +++ .github/workflows/publish-driver-adapters.yml | 83 ++++++ .../workflows/publish-prisma-schema-wasm.yml | 70 ++++++ .github/workflows/quaint.yml | 58 +++++ .github/workflows/query-engine-black-box.yml | 68 +++++ .../query-engine-driver-adapters.yml | 6 + .github/workflows/query-engine.yml | 100 ++++++++ .github/workflows/schema-engine.yml | 238 ++++++++++++++++++ .github/workflows/send-tag-event.yml | 24 ++ .github/workflows/unit-tests.yml | 45 ++++ 17 files changed, 1018 insertions(+) create mode 100644 .github/workflows/benchmark.yml create mode 100644 .github/workflows/build-apple-intel.yml create mode 100644 .github/workflows/build-apple-silicon.yml create mode 100644 .github/workflows/build-wasm.yml create mode 100644 .github/workflows/build-windows.yml create mode 100644 .github/workflows/compilation.yml create mode 100644 .github/workflows/formatting.yml create mode 100644 .github/workflows/on-push-to-main.yml create mode 100644 .github/workflows/publish-driver-adapters.yml create mode 100644 .github/workflows/publish-prisma-schema-wasm.yml create mode 100644 .github/workflows/quaint.yml create mode 100644 .github/workflows/query-engine-black-box.yml create mode 100644 .github/workflows/query-engine.yml create mode 100644 .github/workflows/schema-engine.yml create mode 100644 .github/workflows/send-tag-event.yml create mode 100644 .github/workflows/unit-tests.yml diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 000000000000..4dbfa4855fc9 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,42 @@ +name: Benchmark +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/benchmark.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + benchmark: + name: 'Run benchmarks on Linux' + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - name: Install cargo-codspeed + run: cargo install cargo-codspeed + + - name: 'Build the benchmark targets: schema' + run: cargo codspeed build -p schema + + - name: 'Build the benchmark targets: request-handlers' + run: cargo codspeed build -p request-handlers + + - name: Run the benchmarks + uses: CodSpeedHQ/action@v1 + with: + run: cargo codspeed run + token: 
${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/build-apple-intel.yml b/.github/workflows/build-apple-intel.yml new file mode 100644 index 000000000000..994cbfbb0ad0 --- /dev/null +++ b/.github/workflows/build-apple-intel.yml @@ -0,0 +1,49 @@ +on: + workflow_dispatch: + inputs: + commit: + description: "Commit on the given branch to build" + required: false + +jobs: + build: + # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit + name: "MacOS Intel engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" + env: + SQLITE_MAX_VARIABLE_NUMBER: 250000 + SQLITE_MAX_EXPR_DEPTH: 10000 + + # minimum supported version of macOS + MACOSX_DEPLOYMENT_TARGET: 10.15 + runs-on: macos-13 + + steps: + - name: Output link to real commit + run: echo ${{ github.repository }}/commit/${{ github.event.inputs.commit }} + + - name: Checkout ${{ github.event.inputs.commit }} + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.commit }} + + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-intel-cargo-${{ hashFiles('**/Cargo.lock') }} + + - run: | + cargo build --release -p query-engine -p query-engine-node-api -p schema-engine-cli -p prisma-fmt + + - uses: actions/upload-artifact@v3 + with: + name: binaries + path: | + ${{ github.workspace }}/target/release/schema-engine + ${{ github.workspace }}/target/release/prisma-fmt + ${{ github.workspace }}/target/release/query-engine + ${{ github.workspace }}/target/release/libquery_engine.dylib diff --git a/.github/workflows/build-apple-silicon.yml b/.github/workflows/build-apple-silicon.yml new file mode 100644 index 000000000000..74c49c5154fa --- /dev/null +++ b/.github/workflows/build-apple-silicon.yml @@ -0,0 +1,51 @@ +on: + workflow_dispatch: + inputs: + commit: + description: "Commit on the given branch to build" + required: false + +jobs: + build: + # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit + name: "MacOS ARM64 (Apple Silicon) engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" + env: + SQLITE_MAX_VARIABLE_NUMBER: 250000 + SQLITE_MAX_EXPR_DEPTH: 10000 + runs-on: macos-13 + + steps: + - name: Output link to real commit + run: echo ${{ github.repository }}/commit/${{ github.event.inputs.commit }} + + - name: Checkout ${{ github.event.inputs.commit }} + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.commit }} + + - uses: dtolnay/rust-toolchain@stable + + - name: Install aarch64 toolchain + run: rustup target add aarch64-apple-darwin + + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + + - run: xcodebuild -showsdks + + - run: | + cargo build --target=aarch64-apple-darwin --release -p query-engine -p query-engine-node-api -p schema-engine-cli -p prisma-fmt + + - uses: actions/upload-artifact@v3 + with: + name: binaries + path: | + ${{ github.workspace }}/target/aarch64-apple-darwin/release/schema-engine + ${{ github.workspace }}/target/aarch64-apple-darwin/release/prisma-fmt + ${{ github.workspace }}/target/aarch64-apple-darwin/release/query-engine + ${{ github.workspace }}/target/aarch64-apple-darwin/release/libquery_engine.dylib diff --git a/.github/workflows/build-wasm.yml b/.github/workflows/build-wasm.yml new file mode 100644 index 
000000000000..7969cd2dd462 --- /dev/null +++ b/.github/workflows/build-wasm.yml @@ -0,0 +1,25 @@ +name: WASM build +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/build-wasm.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +jobs: + build: + name: 'prisma-schema-wasm build ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: cachix/install-nix-action@v23 + + - run: nix build .#prisma-schema-wasm + - run: nix flake check diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml new file mode 100644 index 000000000000..1dcd11f092ee --- /dev/null +++ b/.github/workflows/build-windows.yml @@ -0,0 +1,44 @@ +on: + workflow_dispatch: + inputs: + commit: + description: "Commit on the given branch to build" + required: true + +jobs: + build: + # Do not change `name`, prisma-engines Buildkite build job depends on this name ending with the commit + name: "Windows engines build on branch ${{ github.event.ref }} for commit ${{ github.event.inputs.commit }}" + env: + SQLITE_MAX_VARIABLE_NUMBER: 250000 + SQLITE_MAX_EXPR_DEPTH: 10000 + RUSTFLAGS: "-C target-feature=+crt-static" + runs-on: windows-latest + + steps: + - name: Output link to real commit + run: echo ${{ github.repository }}/commit/${{ github.event.inputs.commit }} + + - name: Checkout ${{ github.event.inputs.commit }} + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.commit }} + + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + + - run: cargo build --release + + - uses: actions/upload-artifact@v3 + with: + name: binaries + path: | + ${{ github.workspace }}/target/release/*.exe + ${{ github.workspace }}/target/release/*.dll diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml new file mode 100644 index 000000000000..d9f81f47772b --- /dev/null +++ b/.github/workflows/compilation.yml @@ -0,0 +1,40 @@ +name: 'Release binary compilation test' +on: + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/compilation.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test-crate-compilation: + name: 'Compile top level crates on Linux' + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - run: 'cargo clean && cargo build --release -p schema-engine-cli' + name: 'Compile Migration Engine' + + - run: 'cargo clean && cargo build --release -p prisma-fmt' + name: 'Compile prisma-fmt' + + - run: 'cargo clean && cargo build --release -p query-engine' + name: 'Compile Query Engine Binary' + + - run: 'cargo clean && cargo build --release -p query-engine-node-api' + name: 'Compile Query Engine Library' + + - name: 'Check that Cargo.lock did not change' + run: 'git diff --exit-code' diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml new file mode 100644 index 000000000000..50b635544b91 --- /dev/null +++ b/.github/workflows/formatting.yml @@ -0,0 +1,46 @@ +name: Formatting +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - 
'!.github/workflows/formatting.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + clippy: + runs-on: ubuntu-latest + env: + RUSTFLAGS: '-Dwarnings' + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: clippy + - run: cargo clippy --all-features + + format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt + - name: Check formatting + run: cargo fmt -- --check + shellcheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run ShellCheck + uses: ludeeus/action-shellcheck@master diff --git a/.github/workflows/on-push-to-main.yml b/.github/workflows/on-push-to-main.yml new file mode 100644 index 000000000000..c9c153b1f12d --- /dev/null +++ b/.github/workflows/on-push-to-main.yml @@ -0,0 +1,29 @@ +name: Generate cargo docs & engines size +on: + workflow_dispatch: + push: + branches: + - main + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + publish-to-gh-pages: + runs-on: ubuntu-latest + strategy: + fail-fast: true + + steps: + - uses: actions/checkout@v4 + - uses: cachix/install-nix-action@v23 + - run: | + git config user.email "prismabots@gmail.com" + git config user.name "prisma-bot" + + - name: Generate cargo docs for the workspace to gh-pages branch + run: nix run .#publish-cargo-docs + + - name: Publish engines size to gh-pages branch + run: nix run .#publish-engine-size diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml new file mode 100644 index 000000000000..7da972c35e1b --- /dev/null +++ b/.github/workflows/publish-driver-adapters.yml @@ -0,0 +1,83 @@ +name: Build and publish Prisma Driver Adapters +run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} + +concurrency: publish-prisma-driver-adapters + +on: + # usually triggered via GH Actions Workflow in prisma/prisma repo + workflow_dispatch: + inputs: + enginesHash: + description: Engine commit hash to checkout for publishing + required: true + prismaVersion: + description: Prisma version to use for publishing + required: true + npmDistTag: + description: npm dist-tag to use for publishing + required: true + default: 'latest' + dryRun: + description: 'Check to do a dry run (does not publish packages)' + type: boolean + +jobs: + build: + name: Build and publish Prisma Driver Adapters + runs-on: ubuntu-latest + steps: + - name: Print input + env: + THE_INPUT: '${{ toJson(github.event.inputs) }}' + run: | + echo $THE_INPUT + + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.enginesHash }} + + - uses: pnpm/action-setup@v2.4.0 + with: + version: 8 + + - uses: actions/setup-node@v3 + with: + node-version: '20.x' + registry-url: 'https://registry.npmjs.org/' + + - name: Install dependencies + run: pnpm i + working-directory: query-engine/driver-adapters/js + + - name: Build + run: pnpm -r build + working-directory: query-engine/driver-adapters/js + + - name: Update version in package.json + run: | + # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result + find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; + working-directory: query-engine/driver-adapters/js + + - name: Publish Prisma Driver Adapters packages + run: | + pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} + working-directory: query-engine/driver-adapters/js + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} + + # + # Failure handlers + # + + - name: Set current job url in SLACK_FOOTER env var + if: ${{ failure() }} + run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV + - name: Slack Notification on Failure + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2.2.1 + env: + SLACK_TITLE: 'prisma driver adapters publishing failed :x:' + SLACK_COLOR: '#FF0000' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml new file mode 100644 index 000000000000..f453811009ce --- /dev/null +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -0,0 +1,70 @@ +name: Build and publish @prisma/prisma-schema-wasm + +concurrency: build-prisma-schema-wasm + +on: + # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo + workflow_dispatch: + inputs: + enginesWrapperVersion: + required: true + enginesHash: + required: true + npmDistTag: + required: true + default: 'latest' + +jobs: + build: + name: Build and publish @prisma/prisma-schema-wasm + runs-on: ubuntu-latest + steps: + - name: Print input + env: + THE_INPUT: '${{ toJson(github.event.inputs) }}' + run: | + echo $THE_INPUT + + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.enginesHash }} + - uses: cachix/install-nix-action@v23 + + # + # Build + # + + - run: nix build .#prisma-schema-wasm + + # + # Publish + # + + - uses: actions/setup-node@v3 + with: + node-version: '14.x' + + - name: Set up NPM token + run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc + + - run: | + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + # + # Failure handlers + # + + - name: Set current job url in SLACK_FOOTER env var + if: ${{ failure() }} + run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV + + - name: Slack Notification on Failure + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2.2.1 + env: + SLACK_TITLE: 'prisma-schema-wasm publishing failed :x:' + SLACK_COLOR: '#FF0000' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/.github/workflows/quaint.yml b/.github/workflows/quaint.yml new file mode 100644 index 000000000000..7b49e80a7bd0 --- /dev/null +++ b/.github/workflows/quaint.yml @@ -0,0 +1,58 @@ +name: Quaint +on: + push: + branches: + - main + pull_request: + paths: + - 'quaint/**' + +jobs: + tests: + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + features: + - "--lib --features=all" + - "--lib --no-default-features --features=sqlite" + - "--lib --no-default-features --features=sqlite --features=pooled" + 
- "--lib --no-default-features --features=postgresql" + - "--lib --no-default-features --features=postgresql --features=pooled" + - "--lib --no-default-features --features=mysql" + - "--lib --no-default-features --features=mysql --features=pooled" + - "--lib --no-default-features --features=mssql" + - "--lib --no-default-features --features=mssql --features=pooled" + env: + TEST_MYSQL: "mysql://root:prisma@localhost:3306/prisma" + TEST_MYSQL8: "mysql://root:prisma@localhost:3307/prisma" + TEST_MYSQL_MARIADB: "mysql://root:prisma@localhost:3308/prisma" + TEST_PSQL: "postgres://postgres:prisma@localhost:5432/postgres" + TEST_MSSQL: "jdbc:sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true" + TEST_CRDB: "postgresql://prisma@127.0.0.1:26259/postgres" + + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ matrix.features }} + + - name: Start Databases + run: docker-compose -f docker-compose.yml up -d + working-directory: ./quaint + + - name: Sleep for 20s + uses: juliangruber/sleep-action@v1 + with: + time: 20s + + - name: Run tests + run: cargo test ${{ matrix.features }} + working-directory: ./quaint diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml new file mode 100644 index 000000000000..78e60178d7f7 --- /dev/null +++ b/.github/workflows/query-engine-black-box.yml @@ -0,0 +1,68 @@ +name: Query Engine Black Box +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine-black-box.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-tests: + name: 'Test query-engine as a black-box' + + strategy: + fail-fast: false + matrix: + database: + - name: 'postgres15' + single_threaded: false + connector: 'postgres' + version: '15' + + env: + LOG_LEVEL: 'info' + LOG_QUERIES: 'y' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + TEST_RUNNER: 'direct' + TEST_CONNECTOR: ${{ matrix.database.connector }} + TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' + run: make start-${{ matrix.database.name }} + + - uses: dtolnay/rust-toolchain@stable + + - run: export WORKSPACE_ROOT=$(pwd) && cargo build --package query-engine + env: + CLICOLOR_FORCE: 1 + + - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package black-box-tests -- --test-threads=1 + env: + CLICOLOR_FORCE: 1 diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 79279d617ec4..29b5df8fe5da 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -25,8 +25,14 @@ jobs: fail-fast: false matrix: adapter: + - name: 'pg' + setup_task: 'dev-pg-postgres13' - name: 'neon:ws' setup_task: 'dev-neon-ws-postgres13' + - name: 'libsql' + setup_task: 
'dev-libsql-sqlite' + - name: 'planetscale' + setup_task: 'dev-planetscale-vitess8' node_version: ['18'] env: LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter diff --git a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml new file mode 100644 index 000000000000..9c242217662d --- /dev/null +++ b/.github/workflows/query-engine.yml @@ -0,0 +1,100 @@ +name: Query Engine +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-query-engine-tests: + name: 'Test ${{ matrix.database.name }} (${{ matrix.engine_protocol }}) on Linux' + + strategy: + fail-fast: false + matrix: + database: + - name: 'vitess_5_7' + single_threaded: true + connector: 'vitess' + version: '5.7' + - name: 'vitess_8_0' + single_threaded: true + connector: 'vitess' + version: '8.0' + - name: 'postgres15' + single_threaded: true + connector: 'postgres' + version: '15' + - name: 'mssql_2022' + single_threaded: false + connector: 'sqlserver' + version: '2022' + - name: 'mongodb_4_2' + single_threaded: true + connector: 'mongodb' + version: '4.2' + - name: 'cockroach_23_1' + single_threaded: false + connector: 'cockroachdb' + version: '23.1' + - name: 'cockroach_22_2' + single_threaded: false + connector: 'cockroachdb' + version: '22.2' + - name: 'cockroach_22_1_0' + single_threaded: false + connector: 'cockroachdb' + version: '22.1' + engine_protocol: [graphql, json] + + env: + LOG_LEVEL: 'info' + LOG_QUERIES: 'y' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + TEST_RUNNER: 'direct' + TEST_CONNECTOR: ${{ matrix.database.connector }} + TEST_CONNECTOR_VERSION: ${{ matrix.database.version }} + PRISMA_ENGINE_PROTOCOL: ${{ matrix.engine_protocol }} + + runs-on: buildjet-16vcpu-ubuntu-2004 + steps: + - uses: actions/checkout@v4 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: 'Start ${{ matrix.database.name }} (${{ matrix.engine_protocol }})' + run: make start-${{ matrix.database.name }} + + - uses: dtolnay/rust-toolchain@stable + + - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package query-engine-tests -- --test-threads=1 + if: ${{ matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + + - run: export WORKSPACE_ROOT=$(pwd) && cargo test --package query-engine-tests -- --test-threads=8 + if: ${{ !matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml new file mode 100644 index 000000000000..5bdf25a2bd35 --- /dev/null +++ b/.github/workflows/schema-engine.yml @@ -0,0 +1,238 @@ +name: Schema Engine +on: + push: + branches: + - main + pull_request: + paths-ignore: + # Generic + - '.github/**' + - '!.github/workflows/schema-engine.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + # Specific + - 'query-engine/**' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test-mongodb-schema-connector: + name: 'Test ${{ matrix.database.name }} on Linux' + strategy: + 
fail-fast: false + matrix: + database: + - name: 'mongodb42' + url: 'mongodb://prisma:prisma@localhost:27016/?authSource=admin&retryWrites=true' + - name: 'mongodb44' + url: 'mongodb://prisma:prisma@localhost:27017/?authSource=admin&retryWrites=true' + - name: 'mongodb5' + url: 'mongodb://prisma:prisma@localhost:27018/?authSource=admin&retryWrites=true' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - name: Login to Docker Hub + uses: docker/login-action@v3 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: 'Start ${{ matrix.database.name }}' + run: make start-${{ matrix.database.name }}-single + + - run: cargo test -p mongodb-schema-connector + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + test-linux: + name: 'Test ${{ matrix.database.name }} on Linux' + + strategy: + fail-fast: false + matrix: + database: + - name: mssql_2017 + url: 'sqlserver://localhost:1434;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' + - name: mssql_2019 + url: 'sqlserver://localhost:1433;database=master;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED' + - name: mysql_5_6 + url: 'mysql://root:prisma@localhost:3309' + - name: mysql_5_7 + url: 'mysql://root:prisma@localhost:3306' + - name: mysql_8 + url: 'mysql://root:prisma@localhost:3307' + - name: mysql_mariadb + url: 'mysql://root:prisma@localhost:3308' + - name: postgres9 + url: 'postgresql://postgres:prisma@localhost:5431' + - name: postgres10 + url: 'postgresql://postgres:prisma@localhost:5432' + - name: postgres11 + url: 'postgresql://postgres:prisma@localhost:5433' + - name: postgres12 + url: 'postgresql://postgres:prisma@localhost:5434' + - name: postgres13 + url: 'postgresql://postgres:prisma@localhost:5435' + - name: postgres14 + url: 'postgresql://postgres:prisma@localhost:5437' + - name: postgres15 + url: 'postgresql://postgres:prisma@localhost:5438' + - name: cockroach_23_1 + url: 'postgresql://prisma@localhost:26260' + - name: cockroach_22_2 + url: 'postgresql://prisma@localhost:26259' + - name: cockroach_22_1_0 + url: 'postgresql://prisma@localhost:26257' + - name: sqlite + url: sqlite + - name: vitess_5_7 + url: 'mysql://root:prisma@localhost:33577/test' + shadow_database_url: 'mysql://root:prisma@localhost:33578/shadow' + is_vitess: true + single_threaded: true + - name: vitess_8_0 + url: 'mysql://root:prisma@localhost:33807/test' + shadow_database_url: 'mysql://root:prisma@localhost:33808/shadow' + is_vitess: true + single_threaded: true + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - name: Login to Docker Hub + uses: docker/login-action@v3 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: 'Start ${{ matrix.database.name }}' + run: make start-${{ matrix.database.name }} + + - run: cargo test -p sql-introspection-tests + if: ${{ !matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + - run: cargo test -p sql-schema-describer + if: ${{ !matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + - run: cargo test -p sql-migration-tests + if: ${{ !matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 
+ TEST_DATABASE_URL: ${{ matrix.database.url }} + RUST_LOG: debug + + - run: cargo test -p schema-engine-cli + if: ${{ !matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + - run: cargo test -p sql-introspection-tests -- --test-threads=1 + if: ${{ matrix.database.is_vitess }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} + + - run: cargo test -p sql-migration-tests -- --test-threads=1 + if: ${{ matrix.database.is_vitess }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} + RUST_LOG: debug + + - run: cargo test -p schema-engine-cli -- --test-threads=1 + if: ${{ matrix.database.is_vitess }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + TEST_SHADOW_DATABASE_URL: ${{ matrix.database.shadow_database_url }} + + - run: cargo test -p sql-schema-describer -- --test-threads=1 + if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + - run: cargo test -p sql-introspection-tests -- --test-threads=1 + if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + - run: cargo test -p sql-migration-tests -- --test-threads=1 + if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + RUST_LOG: debug + + - run: cargo test -p schema-engine-cli -- --test-threads=1 + if: ${{ !matrix.database.is_vitess && matrix.database.single_threaded }} + env: + CLICOLOR_FORCE: 1 + TEST_DATABASE_URL: ${{ matrix.database.url }} + + test-windows: + strategy: + fail-fast: false + matrix: + db: + - name: mysql + url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' + - name: mariadb + url: 'mysql://root@localhost:3306?connect_timeout=20&socket_timeout=60' + rust: + - stable + os: + - windows-latest + + runs-on: ${{ matrix.os }} + + name: 'Test ${{ matrix.db.name }} on Windows' + + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ matrix.rust }}-${{ hashFiles('**/Cargo.lock') }} + + - name: Install ${{ matrix.db.name }} + run: | + iwr -useb 'https://raw.githubusercontent.com/scoopinstaller/install/master/install.ps1' -outfile 'install.ps1' + .\install.ps1 -RunAsAdmin + + scoop install sudo + scoop install ${{ matrix.db.name }} + sudo mysqld --install + sudo sc start MySQL + + - name: Run tests + run: cargo test -p sql-migration-tests + env: + TEST_DATABASE_URL: ${{ matrix.db.url }} diff --git a/.github/workflows/send-tag-event.yml b/.github/workflows/send-tag-event.yml new file mode 100644 index 000000000000..2088e258ed49 --- /dev/null +++ b/.github/workflows/send-tag-event.yml @@ -0,0 +1,24 @@ +name: Send Tag Event + +on: + push: + tags: + - '**' + +jobs: + send-tag-event: + runs-on: ubuntu-22.04 + steps: + - run: echo "Sending event for tag ${{ github.ref_name }}" + - uses: actions/github-script@v6 + with: + github-token: ${{ secrets.ACCELERATE_DOCKER_QE_BUILD }} + script: | + await github.rest.repos.createDispatchEvent({ + owner: 'prisma', + repo: 'pdp-cloudflare', + event_type: "engines_release", + 
client_payload: { + prisma_version: '${{ github.ref_name }}', + }, + }) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml new file mode 100644 index 000000000000..b852499205e9 --- /dev/null +++ b/.github/workflows/unit-tests.yml @@ -0,0 +1,45 @@ +name: Unit tests +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/unit-tests.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: Workspace unit tests + + strategy: + fail-fast: false + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - run: | + cargo test --workspace \ + --exclude=quaint \ + --exclude=query-engine \ + --exclude=query-engine-node-api \ + --exclude=black-box-tests \ + --exclude=query-engine-tests \ + --exclude=sql-migration-tests \ + --exclude=schema-engine-cli \ + --exclude=sql-schema-describer \ + --exclude=sql-introspection-tests \ + --exclude=mongodb-schema-connector + env: + CLICOLOR_FORCE: 1 From ca8b9e3390c020ecbb78ecaa3727a7ff831247dc Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 16:09:34 +0200 Subject: [PATCH 17/26] Move documentation to where it belongs --- README.md | 13 +--------- query-engine/connector-test-kit-rs/README.md | 26 ++++++++++++++------ 2 files changed, 19 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 92da9adad511..66199c096cf0 100644 --- a/README.md +++ b/README.md @@ -237,18 +237,7 @@ Run `cargo test` in the repository root. #### Testing driver adapters -Driver adapters are a feature to run queries through javascript drivers from rust. An adapter for a certain driver is -provided by the prisma client and then use by rust code. If you want to run query engine tests through driver adapters: - -- `DRIVER_ADAPTER=$adapter make qe-test` - -Where `$adapter` is one of `pg`, `neon`, or `planetscale` or `libsql` (the driver adapters currently supported) - -This make task hides the underlying complexity of spawning the right docker containers, pulling driver adapters code -from prisma/prisma, building them, and build a test runner to use them. - -When pulling the driver adapters code, make will ensure you have a clone of prisma/prisma in the same directory as -prisma-engines. If you don't, it will clone it for you. +Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README. ## Parallel rust-analyzer builds diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 33d6fecb80ee..97d19467879a 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -64,6 +64,16 @@ On the note of docker containers: Most connectors require an endpoint to run aga If you choose to set up the databases yourself, please note that the connection strings used in the tests (found in the files in `/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/`) to set up user, password and database for the test user. +### Running + +Note that by default tests run concurrently. + +- VSCode should automatically detect tests and display `run test`. +- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. 
+- `cargo test` in the `query-engine-tests` crate. +- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. +- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. + #### Running tests through driver adapters The query engine is able to delegate query execution to javascript through driver adapters. @@ -72,7 +82,7 @@ drivers the code that actually communicates with the databases. See [`adapter-*` To run tests through a driver adapters, you should also configure the following environment variables: -* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. +* `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. @@ -84,15 +94,15 @@ export DRIVER_ADAPTER=neon export DRIVER_ADAPTER_CONFIG ='{ "proxyUrl": "127.0.0.1:5488/v1" }' ```` -### Running +We have provided helpers to run the query-engine tests with driver adapters; these helpers set all the required environment +variables for you: -Note that by default tests run concurrently. + +```shell +DRIVER_ADAPTER=$adapter make test-qe +``` + +Where `$adapter` is one of the supported adapters: `neon`, `planetscale`, `libsql`. -- VSCode should automatically detect tests and display `run test`. -- Use `make test-qe` (minimal log output) or `make test-qe-verbose` (all log output) in `$WORKSPACE_ROOT`. -- `cargo test` in the `query-engine-tests` crate. -- A single test can be tested with the normal cargo rust facilities from command line, e.g. `cargo test --package query-engine-tests --test query_engine_tests --all-features -- queries::filters::where_unique::where_unique::no_unique_fields --exact --nocapture` where `queries::filters::where_unique::where_unique::no_unique_fields` can be substituted for the path you want to test. -- If you want to test a single relation test, define the `RELATION_TEST_IDX` env var with its index. ## Authoring tests The following is an example on how to write a new test suite, as extending or changing an existing one follows the same rules and considerations. From 06cb9c82c54f862e8b7684b4569a088b4c78d538 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Wed, 25 Oct 2023 17:03:56 +0200 Subject: [PATCH 18/26] Document how to do integration testing in shorter loops in CI.
--- README.md | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 66199c096cf0..49c7c1a8ab39 100644 --- a/README.md +++ b/README.md @@ -235,10 +235,31 @@ Other variables may or may not be useful. Run `cargo test` in the repository root. -#### Testing driver adapters +### Testing driver adapters Please refer to the [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters) section in the connector-test-kit-rs README. +**ℹ️ Important note on developing features that require changes to both the query engine and the driver adapters code** + +As explained in [Testing driver adapters](./query-engine/connector-test-kit-rs/README.md#testing-driver-adapters), running `DRIVER_ADAPTER=$adapter make qe-test` +will ensure you have prisma checked out in your filesystem in the same directory as prisma-engines. This is needed because the driver adapters code is symlinked in prisma-engines. + +When working on a feature or bugfix that spans the adapters code and the query-engine code, you will need to open sibling PRs in `prisma/prisma` and `prisma/prisma-engines`. +Locally, each time you run `DRIVER_ADAPTER=$adapter make qe-test`, tests run using the driver adapters built from the source code in your working copy of prisma/prisma. + +In CI, though, we need to specify which branch of prisma/prisma to use for tests, because there is no working copy of prisma/prisma before the tests run. +The CI jobs clone the prisma/prisma `main` branch by default, which doesn't include your local changes. To test the integration, we can tell CI to use the branch of prisma/prisma that contains +the adapter changes, using a simple convention in commit messages: + +``` +git commit -m "DRIVER_ADAPTERS_BRANCH=prisma-branch-with-changes-in-adapters [...]" +``` + +GitHub Actions will then pick up the branch name, use it to clone that branch of prisma/prisma, and build the driver adapters code from there. + +When it's time to merge the sibling PRs, merge the prisma/prisma PR first, so that when you merge the engines PR the adapters code is already in the prisma/prisma `main` branch. + + ## Parallel rust-analyzer builds When rust-analzyer runs `cargo check` it will lock the build directory and stop any cargo commands from running until it has completed. This makes the build process feel a lot longer.
It is possible to avoid this by setting a different build path for From 0c2f426a12199ea1ba0eab309ced988130a76c3a Mon Sep 17 00:00:00 2001 From: jkomyno Date: Thu, 26 Oct 2023 14:10:32 +0200 Subject: [PATCH 19/26] chore(driver-adapters): remove outdated symlink to tsconfig file --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index 5c27cf0c027b..541738c35d95 100644 --- a/Makefile +++ b/Makefile @@ -302,7 +302,6 @@ symlink-driver-adapters: ensure-prisma-present echo "Created symbolic link for $$dir_name"; \ fi; \ done; - @ln -sfn "../prisma/tsconfig.build.adapter.json" "./tsconfig.build.adapter.json"; \ echo "Symbolic links creation completed."; ensure-prisma-present: From d6614c12a91dd7605c0e735ceaff9384d8559ba3 Mon Sep 17 00:00:00 2001 From: jkomyno Date: Thu, 26 Oct 2023 14:11:19 +0200 Subject: [PATCH 20/26] fix(driver-adapters): use ws, making connector-test-kit-executor compatible with Node.js 16+ --- .../connector-test-kit-executor/package.json | 3 ++- .../connector-test-kit-executor/pnpm-lock.yaml | 11 +++++++---- .../connector-test-kit-executor/src/index.ts | 3 ++- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index b4da6f9f0b24..d6ccaaa71e02 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -27,7 +27,8 @@ "@prisma/driver-adapter-utils": "../driver-adapter-utils", "@types/pg": "^8.10.2", "pg": "^8.11.3", - "undici": "^5.26.2" + "undici": "^5.26.5", + "ws": "^8.14.2" }, "devDependencies": { "@types/node": "^20.5.1", diff --git a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml index 4ec9635190b0..d140be7b516c 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml +++ b/query-engine/driver-adapters/connector-test-kit-executor/pnpm-lock.yaml @@ -36,8 +36,11 @@ dependencies: specifier: ^8.11.3 version: 8.11.3 undici: - specifier: ^5.26.2 - version: 5.26.2 + specifier: ^5.26.5 + version: 5.26.5 + ws: + specifier: ^8.14.2 + version: 8.14.2 devDependencies: '@types/node': @@ -1350,8 +1353,8 @@ packages: hasBin: true dev: true - /undici@5.26.2: - resolution: {integrity: sha512-a4PDLQgLTPHVzOK+x3F79/M4GtyYPl+aX9AAK7aQxpwxDwCqkeZCScy7Gk5kWT3JtdFq1uhO3uZJdLtHI4dK9A==} + /undici@5.26.5: + resolution: {integrity: sha512-cSb4bPFd5qgR7qr2jYAi0hlX9n5YKK2ONKkLFkxl+v/9BvC0sOpZjBHDBSXc5lWAf5ty9oZdRXytBIHzgUcerw==} engines: {node: '>=14.0'} dependencies: '@fastify/busboy': 2.0.0 diff --git a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts index 8a05a6b2e9aa..b89348fb3e77 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts +++ b/query-engine/driver-adapters/connector-test-kit-executor/src/index.ts @@ -9,7 +9,8 @@ import * as prismaPg from '@prisma/adapter-pg' // neon dependencies import { Pool as NeonPool, neonConfig } from '@neondatabase/serverless' -import { fetch, WebSocket } from 'undici' +import { fetch } from 'undici' +import { WebSocket } from 'ws' import * as prismaNeon from '@prisma/adapter-neon' // libsql dependencies From 187e46f99a9d9e563750b4cce81be87624393495 Mon Sep 17 00:00:00 2001 From: jkomyno Date: Thu, 26 Oct 2023 14:14:03 +0200 Subject: 
[PATCH 21/26] fix(driver-adapters): remove warning "import.meta" is not available with the "cjs" output format --- .../driver-adapters/connector-test-kit-executor/package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/connector-test-kit-executor/package.json b/query-engine/driver-adapters/connector-test-kit-executor/package.json index d6ccaaa71e02..b63694bb4459 100644 --- a/query-engine/driver-adapters/connector-test-kit-executor/package.json +++ b/query-engine/driver-adapters/connector-test-kit-executor/package.json @@ -6,10 +6,11 @@ "name": "connector-test-kit-executor", "version": "0.0.1", "description": "", - "main": "dist/index.js", + "main": "dist/index.mjs", + "module": "dist/index.mjs", "private": true, "scripts": { - "build": "tsup ./src/index.ts --format cjs,esm --dts", + "build": "tsup ./src/index.ts --format esm --dts", "lint": "tsc -p ./tsconfig.build.json" }, "keywords": [], From eeaaa8f92a24ae8ff3951b786002ca76ae6de837 Mon Sep 17 00:00:00 2001 From: jkomyno Date: Thu, 26 Oct 2023 14:16:58 +0200 Subject: [PATCH 22/26] chore(driver-adapters): remove references to query-engine-driver-adapters.yml --- .envrc | 2 +- .../query-engine-driver-adapters.yml | 91 ------------------- query-engine/connector-test-kit-rs/README.md | 2 +- 3 files changed, 2 insertions(+), 93 deletions(-) delete mode 100644 .github/workflows/query-engine-driver-adapters.yml diff --git a/.envrc b/.envrc index 48b1254c1700..bd1773f56ce4 100644 --- a/.envrc +++ b/.envrc @@ -22,7 +22,7 @@ export QE_LOG_LEVEL=debug # Set it to "trace" to enable query-graph debugging lo # export PRISMA_RENDER_DOT_FILE=1 # Uncomment to enable rendering a dot file of the Query Graph from an executed query. # export FMT_SQL=1 # Uncomment it to enable logging formatted SQL queries -### Uncomment to run driver adapters tests. See query-engine-driver-adapters.yml workflow for how tests run in CI. +### Uncomment to run driver adapters tests. 
# export EXTERNAL_TEST_EXECUTOR="$(pwd)/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" # export DRIVER_ADAPTER=pg # Set to pg, neon or planetscale # export PRISMA_DISABLE_QUAINT_EXECUTORS=1 # Disable quaint executors for driver adapters diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml deleted file mode 100644 index 29b5df8fe5da..000000000000 --- a/.github/workflows/query-engine-driver-adapters.yml +++ /dev/null @@ -1,91 +0,0 @@ -name: Driver Adapters -on: - push: - branches: - - main - pull_request: - paths-ignore: - - '.github/**' - - '!.github/workflows/query-engine-driver-adapters.yml' - - '.buildkite/**' - - '*.md' - - 'LICENSE' - - 'CODEOWNERS' - - 'renovate.json' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - rust-query-engine-tests: - name: 'Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}' - - strategy: - fail-fast: false - matrix: - adapter: - - name: 'pg' - setup_task: 'dev-pg-postgres13' - - name: 'neon:ws' - setup_task: 'dev-neon-ws-postgres13' - - name: 'libsql' - setup_task: 'dev-libsql-sqlite' - - name: 'planetscale' - setup_task: 'dev-planetscale-vitess8' - node_version: ['18'] - env: - LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter - LOG_QUERIES: 'y' - RUST_LOG: 'info' - RUST_LOG_FORMAT: 'devel' - RUST_BACKTRACE: '1' - CLICOLOR_FORCE: '1' - CLOSED_TX_CLEANUP: '2' - SIMPLE_TEST_MODE: '1' - QUERY_BATCH_SIZE: '10' - WORKSPACE_ROOT: ${{ github.workspace }} - - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: 'Setup Node.js' - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node_version }} - - - name: 'Setup pnpm' - uses: pnpm/action-setup@v2 - with: - version: 8 - - - name: 'Get pnpm store directory' - shell: bash - run: | - echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV - - - name: 'Login to Docker Hub' - uses: docker/login-action@v3 - continue-on-error: true - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Extract Branch Name - id: extract-branch - run: | - branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" - if [ -n $branch ]; then - echo "Using $branch branch of driver adapters" - echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" - fi - - - run: make ${{ matrix.adapter.setup_task }} - - - uses: dtolnay/rust-toolchain@stable - - - name: 'Run tests' - run: cargo test --package query-engine-tests -- --test-threads=1 diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 97d19467879a..cfce70d777ad 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -84,7 +84,7 @@ To run tests through a driver adapters, you should also configure the following * `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. 
-* `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. +* `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. Example: From 2ef6ee34ba3abae0ba1a4deb123655f30f08332e Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Thu, 26 Oct 2023 20:18:39 +0200 Subject: [PATCH 23/26] Revert "chore(driver-adapters): remove references to query-engine-driver-adapters.yml" This reverts commit eeaaa8f92a24ae8ff3951b786002ca76ae6de837. --- .envrc | 2 +- .../query-engine-driver-adapters.yml | 91 +++++++++++++++++++ query-engine/connector-test-kit-rs/README.md | 2 +- 3 files changed, 93 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/query-engine-driver-adapters.yml diff --git a/.envrc b/.envrc index bd1773f56ce4..48b1254c1700 100644 --- a/.envrc +++ b/.envrc @@ -22,7 +22,7 @@ export QE_LOG_LEVEL=debug # Set it to "trace" to enable query-graph debugging lo # export PRISMA_RENDER_DOT_FILE=1 # Uncomment to enable rendering a dot file of the Query Graph from an executed query. # export FMT_SQL=1 # Uncomment it to enable logging formatted SQL queries -### Uncomment to run driver adapters tests. +### Uncomment to run driver adapters tests. See query-engine-driver-adapters.yml workflow for how tests run in CI. # export EXTERNAL_TEST_EXECUTOR="$(pwd)/query-engine/driver-adapters/js/connector-test-kit-executor/script/start_node.sh" # export DRIVER_ADAPTER=pg # Set to pg, neon or planetscale # export PRISMA_DISABLE_QUAINT_EXECUTORS=1 # Disable quaint executors for driver adapters diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml new file mode 100644 index 000000000000..29b5df8fe5da --- /dev/null +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -0,0 +1,91 @@ +name: Driver Adapters +on: + push: + branches: + - main + pull_request: + paths-ignore: + - '.github/**' + - '!.github/workflows/query-engine-driver-adapters.yml' + - '.buildkite/**' + - '*.md' + - 'LICENSE' + - 'CODEOWNERS' + - 'renovate.json' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + rust-query-engine-tests: + name: 'Test `${{ matrix.adapter.name }}` on node v${{ matrix.node_version }}' + + strategy: + fail-fast: false + matrix: + adapter: + - name: 'pg' + setup_task: 'dev-pg-postgres13' + - name: 'neon:ws' + setup_task: 'dev-neon-ws-postgres13' + - name: 'libsql' + setup_task: 'dev-libsql-sqlite' + - name: 'planetscale' + setup_task: 'dev-planetscale-vitess8' + node_version: ['18'] + env: + LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter + LOG_QUERIES: 'y' + RUST_LOG: 'info' + RUST_LOG_FORMAT: 'devel' + RUST_BACKTRACE: '1' + CLICOLOR_FORCE: '1' + CLOSED_TX_CLEANUP: '2' + SIMPLE_TEST_MODE: '1' + QUERY_BATCH_SIZE: '10' + WORKSPACE_ROOT: ${{ github.workspace }} + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: 'Setup Node.js' + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node_version }} + + - name: 'Setup pnpm' + uses: pnpm/action-setup@v2 + with: + version: 8 + + - name: 'Get pnpm store directory' + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> 
$GITHUB_ENV + + - name: 'Login to Docker Hub' + uses: docker/login-action@v3 + continue-on-error: true + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Extract Branch Name + id: extract-branch + run: | + branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" + if [ -n $branch ]; then + echo "Using $branch branch of driver adapters" + echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" + fi + + - run: make ${{ matrix.adapter.setup_task }} + + - uses: dtolnay/rust-toolchain@stable + + - name: 'Run tests' + run: cargo test --package query-engine-tests -- --test-threads=1 diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index cfce70d777ad..97d19467879a 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -84,7 +84,7 @@ To run tests through a driver adapters, you should also configure the following * `EXTERNAL_TEST_EXECUTOR`: tells the query engine test kit to use an external process to run the queries, this is a node process running a program that will read the queries to run from STDIN, and return responses to STDOUT. The connector kit follows a protocol over JSON RPC for this communication. * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. -* `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. +* `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. 
Example: From a0b1f28152c292b1850472b1fc0c20fa2124eea4 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Thu, 26 Oct 2023 20:19:25 +0200 Subject: [PATCH 24/26] Remove publish-driver-adapters workflow --- .github/workflows/publish-driver-adapters.yml | 83 ------------------- 1 file changed, 83 deletions(-) delete mode 100644 .github/workflows/publish-driver-adapters.yml diff --git a/.github/workflows/publish-driver-adapters.yml b/.github/workflows/publish-driver-adapters.yml deleted file mode 100644 index 7da972c35e1b..000000000000 --- a/.github/workflows/publish-driver-adapters.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Build and publish Prisma Driver Adapters -run-name: npm - release Driver Adapters ${{ github.event.inputs.prismaVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} - -concurrency: publish-prisma-driver-adapters - -on: - # usually triggered via GH Actions Workflow in prisma/prisma repo - workflow_dispatch: - inputs: - enginesHash: - description: Engine commit hash to checkout for publishing - required: true - prismaVersion: - description: Prisma version to use for publishing - required: true - npmDistTag: - description: npm dist-tag to use for publishing - required: true - default: 'latest' - dryRun: - description: 'Check to do a dry run (does not publish packages)' - type: boolean - -jobs: - build: - name: Build and publish Prisma Driver Adapters - runs-on: ubuntu-latest - steps: - - name: Print input - env: - THE_INPUT: '${{ toJson(github.event.inputs) }}' - run: | - echo $THE_INPUT - - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.inputs.enginesHash }} - - - uses: pnpm/action-setup@v2.4.0 - with: - version: 8 - - - uses: actions/setup-node@v3 - with: - node-version: '20.x' - registry-url: 'https://registry.npmjs.org/' - - - name: Install dependencies - run: pnpm i - working-directory: query-engine/driver-adapters/js - - - name: Build - run: pnpm -r build - working-directory: query-engine/driver-adapters/js - - - name: Update version in package.json - run: | - # find all files package.json, and for each use jq to write the version, then write to temp file and overwrite original file with result - find . 
-name "package.json" -exec bash -c 'jq --arg version "${{ github.event.inputs.prismaVersion }}" ".version = \$version" "{}" > tmpfile && mv tmpfile "{}"' \; - working-directory: query-engine/driver-adapters/js - - - name: Publish Prisma Driver Adapters packages - run: | - pnpm -r publish --no-git-checks --tag ${{ github.event.inputs.npmDistTag }} ${{ env.DRY_RUN }} - working-directory: query-engine/driver-adapters/js - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - DRY_RUN: ${{ github.event.inputs.dryRun == 'true' && '--dry-run' || '' }} - - # - # Failure handlers - # - - - name: Set current job url in SLACK_FOOTER env var - if: ${{ failure() }} - run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure - if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 - env: - SLACK_TITLE: 'prisma driver adapters publishing failed :x:' - SLACK_COLOR: '#FF0000' - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_DRIVER_ADPATERS_FAILING }} From feb52441b775acf430cdbbd271257b4144d958ae Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Fri, 27 Oct 2023 10:29:29 +0200 Subject: [PATCH 25/26] Fix using main branch --- .github/workflows/query-engine-driver-adapters.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 29b5df8fe5da..e64dd6ab680d 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -78,7 +78,7 @@ jobs: id: extract-branch run: | branch="$(git show -s --format=%s | grep -o "DRIVER_ADAPTERS_BRANCH=[^ ]*" | cut -f2 -d=)" - if [ -n $branch ]; then + if [ -n "$branch" ]; then echo "Using $branch branch of driver adapters" echo "DRIVER_ADAPTERS_BRANCH=$branch" >> "$GITHUB_ENV" fi From 489fe860528e2045462fc8f06e774d07fc677597 Mon Sep 17 00:00:00 2001 From: Miguel Fernandez Date: Fri, 27 Oct 2023 10:31:05 +0200 Subject: [PATCH 26/26] Take back conditional on docker login after bad main merge --- .github/workflows/query-engine-driver-adapters.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index e64dd6ab680d..f02045427df8 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -69,6 +69,7 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" continue-on-error: true with: username: ${{ secrets.DOCKERHUB_USERNAME }}