From 7e6deb2a66f3bf11dd8c80ecc64c1437d40a37e8 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 09:18:16 +0100 Subject: [PATCH 01/45] Add C ABI target with support for iOS and Android --- Cargo.lock | 70 +- Cargo.toml | 4 + Makefile | 3 + libs/query-engine-common/Cargo.toml | 6 +- libs/query-engine-common/src/engine.rs | 1 + libs/query-engine-common/src/error.rs | 1 + libs/user-facing-errors/src/common.rs | 10 + libs/user-facing-errors/src/lib.rs | 13 + query-engine/connector-test-kit-rs/README.md | 2 +- .../tests/new/interactive_tx.rs | 2 +- .../query-engine-tests/tests/new/metrics.rs | 2 +- .../query-tests-setup/src/config.rs | 22 +- .../src/connector_tag/sqlite.rs | 3 + .../test-configs/react-native | 5 + .../connectors/sql-query-connector/Cargo.toml | 14 +- .../sql-query-connector/src/database/mod.rs | 19 +- .../connectors/sql-query-connector/src/lib.rs | 13 +- .../driver-adapters/executor/package.json | 8 +- .../driver-adapters/executor/src/rn.ts | 79 +++ .../driver-adapters/executor/src/testd.ts | 21 +- query-engine/driver-adapters/package.json | 6 +- query-engine/query-engine-c-abi/.gitignore | 10 + query-engine/query-engine-c-abi/Cargo.toml | 49 ++ query-engine/query-engine-c-abi/Makefile | 59 ++ query-engine/query-engine-c-abi/README.md | 51 ++ .../build-android-target.sh | 60 ++ .../query-engine-c-abi/build-openssl.sh | 78 +++ query-engine/query-engine-c-abi/build.rs | 33 + .../query-engine-c-abi/cargo-config.toml | 14 + .../query-engine-c-abi/copy-android.sh | 18 + query-engine/query-engine-c-abi/copy-ios.sh | 14 + query-engine/query-engine-c-abi/src/engine.rs | 653 ++++++++++++++++++ query-engine/query-engine-c-abi/src/error.rs | 103 +++ .../query-engine-c-abi/src/functions.rs | 40 ++ query-engine/query-engine-c-abi/src/lib.rs | 9 + query-engine/query-engine-c-abi/src/logger.rs | 174 +++++ .../query-engine-c-abi/src/migrations.rs | 191 +++++ query-engine/query-engine-c-abi/src/tracer.rs | 1 + query-engine/query-engine-node-api/Cargo.toml | 8 +- .../query-engine-wasm/example/.gitignore | 1 + .../query-engine-wasm/example/example.js | 84 +++ .../query-engine-wasm/example/package.json | 14 + .../query-engine-wasm/example/pnpm-lock.yaml | 382 ++++++++++ .../example/prisma/schema.prisma | 13 + query-engine/query-engine/Cargo.toml | 7 +- query-engine/request-handlers/Cargo.toml | 6 +- .../request-handlers/src/load_executor.rs | 9 +- renovate.json | 3 - schema-engine/core/src/lib.rs | 2 +- schema-engine/core/src/state.rs | 12 +- 50 files changed, 2343 insertions(+), 59 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/test-configs/react-native create mode 100644 query-engine/driver-adapters/executor/src/rn.ts create mode 100644 query-engine/query-engine-c-abi/.gitignore create mode 100644 query-engine/query-engine-c-abi/Cargo.toml create mode 100644 query-engine/query-engine-c-abi/Makefile create mode 100644 query-engine/query-engine-c-abi/README.md create mode 100755 query-engine/query-engine-c-abi/build-android-target.sh create mode 100755 query-engine/query-engine-c-abi/build-openssl.sh create mode 100644 query-engine/query-engine-c-abi/build.rs create mode 100644 query-engine/query-engine-c-abi/cargo-config.toml create mode 100755 query-engine/query-engine-c-abi/copy-android.sh create mode 100755 query-engine/query-engine-c-abi/copy-ios.sh create mode 100644 query-engine/query-engine-c-abi/src/engine.rs create mode 100644 query-engine/query-engine-c-abi/src/error.rs create mode 100644 query-engine/query-engine-c-abi/src/functions.rs create mode 
100644 query-engine/query-engine-c-abi/src/lib.rs create mode 100644 query-engine/query-engine-c-abi/src/logger.rs create mode 100644 query-engine/query-engine-c-abi/src/migrations.rs create mode 100644 query-engine/query-engine-c-abi/src/tracer.rs create mode 100644 query-engine/query-engine-wasm/example/.gitignore create mode 100644 query-engine/query-engine-wasm/example/example.js create mode 100644 query-engine/query-engine-wasm/example/package.json create mode 100644 query-engine/query-engine-wasm/example/pnpm-lock.yaml create mode 100644 query-engine/query-engine-wasm/example/prisma/schema.prisma diff --git a/Cargo.lock b/Cargo.lock index b58037af9285..e5ac5c84c9dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -475,6 +475,25 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "cbindgen" +version = "0.24.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b922faaf31122819ec80c4047cc684c6979a087366c069611e33649bf98e18d" +dependencies = [ + "clap 3.2.25", + "heck 0.4.1", + "indexmap 1.9.3", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 1.0.109", + "tempfile", + "toml", +] + [[package]] name = "cc" version = "1.0.83" @@ -574,9 +593,12 @@ version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ + "atty", "bitflags 1.3.2", "clap_lex", "indexmap 1.9.3", + "strsim 0.10.0", + "termcolor", "textwrap 0.16.0", ] @@ -2096,9 +2118,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.150" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libloading" @@ -3721,6 +3743,41 @@ dependencies = [ "user-facing-errors", ] +[[package]] +name = "query-engine-c-abi" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "cbindgen", + "chrono", + "connection-string", + "futures", + "indoc 2.0.3", + "once_cell", + "opentelemetry", + "psl", + "quaint", + "query-connector", + "query-core", + "query-engine-common", + "query-structure", + "request-handlers", + "rusqlite", + "serde", + "serde_json", + "sql-query-connector", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "tracing-opentelemetry", + "tracing-subscriber", + "url", + "user-facing-errors", + "uuid", +] + [[package]] name = "query-engine-common" version = "0.1.0" @@ -5241,6 +5298,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "termcolor" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" +dependencies = [ + "winapi-util", +] + [[package]] name = "test-cli" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index f14f7c508c8c..513dc7283b04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ members = [ "query-engine/query-engine", "query-engine/query-engine-node-api", "query-engine/query-engine-wasm", + "query-engine/query-engine-c-abi", "query-engine/request-handlers", "query-engine/schema", "libs/*", @@ -86,6 +87,9 @@ strip = "symbols" [profile.release.package.query-engine] strip = "symbols" +[profile.release.package.query-engine-c-abi] +strip = "symbols" + 
[profile.release] lto = "fat" codegen-units = 1 diff --git a/Makefile b/Makefile index e4764c48b9a5..02932a401258 100644 --- a/Makefile +++ b/Makefile @@ -136,6 +136,9 @@ start-sqlite: dev-sqlite: cp $(CONFIG_PATH)/sqlite $(CONFIG_FILE) +dev-react-native: + cp $(CONFIG_PATH)/react-native $(CONFIG_FILE) + dev-libsql-js: build-qe-napi build-driver-adapters-kit cp $(CONFIG_PATH)/libsql-js $(CONFIG_FILE) diff --git a/libs/query-engine-common/Cargo.toml b/libs/query-engine-common/Cargo.toml index e2fb3b4bfe48..e1e83e1ffb3a 100644 --- a/libs/query-engine-common/Cargo.toml +++ b/libs/query-engine-common/Cargo.toml @@ -20,11 +20,11 @@ tracing.workspace = true tracing-subscriber = { version = "0.3" } tracing-futures = "0.2" tracing-opentelemetry = "0.17.3" -opentelemetry = { version = "0.17"} +opentelemetry = { version = "0.17" } -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -napi.workspace = true +[target.'cfg(all(not(target_arch = "wasm32"), not(target_os = "ios"), not(target_os = "android")))'.dependencies] query-engine-metrics = { path = "../../query-engine/metrics" } +napi.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] wasm-bindgen.workspace = true diff --git a/libs/query-engine-common/src/engine.rs b/libs/query-engine-common/src/engine.rs index 77aa2fec804b..3e1d1cfef154 100644 --- a/libs/query-engine-common/src/engine.rs +++ b/libs/query-engine-common/src/engine.rs @@ -58,6 +58,7 @@ pub struct EngineBuilder { pub struct ConnectedEngineNative { pub config_dir: PathBuf, pub env: HashMap, + #[cfg(all(not(target_os = "ios"), not(target_os = "android")))] pub metrics: Option, } diff --git a/libs/query-engine-common/src/error.rs b/libs/query-engine-common/src/error.rs index f7c9712af8a7..ef3b4b719d2c 100644 --- a/libs/query-engine-common/src/error.rs +++ b/libs/query-engine-common/src/error.rs @@ -94,6 +94,7 @@ impl From for ApiError { } #[cfg(not(target_arch = "wasm32"))] +#[cfg(not(any(target_os = "android", target_os = "ios")))] impl From for napi::Error { fn from(e: ApiError) -> Self { let user_facing = user_facing_errors::Error::from(e); diff --git a/libs/user-facing-errors/src/common.rs b/libs/user-facing-errors/src/common.rs index 8976889e6114..412d23620fd8 100644 --- a/libs/user-facing-errors/src/common.rs +++ b/libs/user-facing-errors/src/common.rs @@ -239,6 +239,16 @@ pub struct UnsupportedFeatureError { pub message: String, } +#[derive(Debug, Serialize, UserFacingError)] +#[user_facing( + code = "P3009", + message = "migrate found failed migrations in the target database, new migrations will not be applied. Read more about how to resolve migration issues in a production database: https://pris.ly/d/migrate-resolve\n{details}" +)] +pub struct FoundFailedMigrations { + /// The details about each failed migration. 
+ pub details: String, +} + #[cfg(test)] mod tests { use super::*; diff --git a/libs/user-facing-errors/src/lib.rs b/libs/user-facing-errors/src/lib.rs index 7d7856831637..03cb0438e5b3 100644 --- a/libs/user-facing-errors/src/lib.rs +++ b/libs/user-facing-errors/src/lib.rs @@ -216,3 +216,16 @@ impl From for Error { } } } + +impl From for Error { + fn from(serde_error: serde_json::Error) -> Self { + Error { + is_panic: false, + inner: ErrorType::Unknown(UnknownError { + message: format!("{}", serde_error), + backtrace: None, + }), + batch_request_idx: None, + } + } +} diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 650f8f2d4dd0..4b44b96c16ba 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -84,7 +84,7 @@ To run tests through a driver adapters, you should also configure the following * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. -* `ENGINE`: can be used to run either `wasm` or `napi` version of the engine. +* `ENGINE`: can be used to run either `wasm` or `napi` or `mobile` version of the engine. Example: diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 743c42154db8..7f038e0d369e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -71,7 +71,7 @@ mod interactive_tx { @r###"{"data":{"createOneTestModel":{"id":1}}}"### ); - time::sleep(time::Duration::from_millis(1500)).await; + time::sleep(time::Duration::from_millis(10500)).await; runner.clear_active_tx(); // Everything must be rolled back. 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 827a35daeac7..d47dea61e186 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -5,7 +5,7 @@ use query_engine_tests::test_suite; exclude( Vitess("planetscale.js", "planetscale.js.wasm"), Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), - Sqlite("libsql.js", "libsql.js.wasm") + Sqlite("libsql.js", "libsql.js.wasm", "rn") ) )] mod metrics { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index f1248e3c4d94..625dd14d333d 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -7,10 +7,11 @@ use std::{convert::TryFrom, env, fmt::Display, fs::File, io::Read, path::PathBuf static TEST_CONFIG_FILE_NAME: &str = ".test_config"; -#[derive(Debug, Deserialize, Clone)] +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] pub enum TestExecutor { Napi, Wasm, + Mobile, } impl Display for TestExecutor { @@ -18,6 +19,7 @@ impl Display for TestExecutor { match self { TestExecutor::Napi => f.write_str("Napi"), TestExecutor::Wasm => f.write_str("Wasm"), + TestExecutor::Mobile => f.write_str("Mobile"), } } } @@ -39,7 +41,7 @@ pub struct TestConfig { /// Used when testing driver adapters, this process is expected to be a javascript process /// loading the library engine (as a library, or WASM modules) and providing it with a /// driver adapter. - /// Possible values: Napi, Wasm + /// Possible values: Napi, Wasm, Mobile /// Env key: `EXTERNAL_TEST_EXECUTOR` pub(crate) external_test_executor: Option, @@ -162,10 +164,10 @@ impl TestConfig { fn from_file() -> Option { let current_dir = env::current_dir().ok(); - - current_dir + let result = current_dir .and_then(|path| Self::try_path(config_path(path))) - .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))) + .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))); + result } fn try_path(path: PathBuf) -> Option { @@ -244,10 +246,12 @@ impl TestConfig { } } - if self.external_test_executor.is_some() && self.driver_adapter.is_none() { - exit_with_message( - "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", - ); + if let Some(external_test_executor) = &self.external_test_executor { + if *external_test_executor != TestExecutor::Mobile && self.driver_adapter.is_none() { + exit_with_message( + "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } } if self.driver_adapter.is_some() && self.external_test_executor.is_none() { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index 2173bbdd38f2..2194f6e6404b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -29,6 +29,7 @@ impl ConnectorTagInterface for SqliteConnectorTag { #[derive(Clone, Debug, PartialEq, Eq)] pub enum SqliteVersion { V3, + RN, LibsqlJsNapi, LibsqlJsWasm, } @@ -36,6 +37,7 @@ pub enum 
SqliteVersion { impl ToString for SqliteVersion { fn to_string(&self) -> String { match self { + SqliteVersion::RN => "rn".to_string(), SqliteVersion::V3 => "3".to_string(), SqliteVersion::LibsqlJsNapi => "libsql.js".to_string(), SqliteVersion::LibsqlJsWasm => "libsql.js.wasm".to_string(), @@ -51,6 +53,7 @@ impl TryFrom<&str> for SqliteVersion { "3" => Self::V3, "libsql.js" => Self::LibsqlJsNapi, "libsql.js.wasm" => Self::LibsqlJsWasm, + "rn" => Self::RN, _ => return Err(TestError::parse_error(format!("Unknown SQLite version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/test-configs/react-native b/query-engine/connector-test-kit-rs/test-configs/react-native new file mode 100644 index 000000000000..0f1ab57860e3 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/react-native @@ -0,0 +1,5 @@ +{ + "connector": "sqlite", + "version": "rn", + "external_test_executor": "Mobile" +} diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 4c55cff55420..68ef5bf4494d 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -10,15 +10,8 @@ sqlite = ["quaint/sqlite", "psl/sqlite"] mssql = ["quaint/mssql"] cockroachdb = ["relation_joins", "quaint/postgresql", "psl/cockroachdb"] vendored-openssl = ["quaint/vendored-openssl"] -native_all = [ - "sqlite", - "mysql", - "postgresql", - "mssql", - "cockroachdb", - "quaint/native", - "quaint/pooled", -] +all = ["sqlite", "mysql", "postgresql", "mssql", "cockroachdb", "native"] +native = ["quaint/native", "quaint/pooled"] # TODO: At the moment of writing (rustc 1.77.0), can_have_capability from psl does not eliminate joins # code from bundle for some reason, so we are doing it explicitly. 
Check with a newer version of compiler - if elimination # happens successfully, we don't need this feature anymore @@ -45,6 +38,9 @@ opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } quaint.workspace = true +# quaint = { path = "../../../quaint", default-features = false, features = [ +# "sqlite", +# ] } [dependencies.connector-interface] diff --git a/query-engine/connectors/sql-query-connector/src/database/mod.rs b/query-engine/connectors/sql-query-connector/src/database/mod.rs index 513100250c8f..e0ec3f7e29e5 100644 --- a/query-engine/connectors/sql-query-connector/src/database/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/database/mod.rs @@ -3,11 +3,15 @@ mod connection; mod js; mod transaction; -#[cfg(not(target_arch = "wasm32"))] +#[cfg(feature = "native")] pub(crate) mod native { + #[cfg(feature = "mssql")] pub(crate) mod mssql; + #[cfg(feature = "mysql")] pub(crate) mod mysql; + #[cfg(feature = "postgresql")] pub(crate) mod postgresql; + #[cfg(feature = "sqlite")] pub(crate) mod sqlite; } @@ -19,8 +23,17 @@ use connector_interface::{error::ConnectorError, Connector}; #[cfg(feature = "driver-adapters")] pub use js::*; -#[cfg(not(target_arch = "wasm32"))] -pub use native::{mssql::*, mysql::*, postgresql::*, sqlite::*}; +#[cfg(all(feature = "native", feature = "mssql"))] +pub use native::mssql::*; + +#[cfg(all(feature = "native", feature = "mysql"))] +pub use native::mysql::*; + +#[cfg(all(feature = "native", feature = "postgresql"))] +pub use native::postgresql::*; + +#[cfg(all(feature = "native", feature = "sqlite"))] +pub use native::sqlite::*; #[async_trait] pub trait FromSource { diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 8dc26bda5c25..52bc33a51b0c 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -27,7 +27,16 @@ pub use database::FromSource; pub use database::Js; pub use error::SqlError; -#[cfg(not(target_arch = "wasm32"))] -pub use database::{Mssql, Mysql, PostgreSql, Sqlite}; +#[cfg(all(feature = "native", feature = "mssql"))] +pub use database::Mssql; + +#[cfg(all(feature = "native", feature = "mysql"))] +pub use database::Mysql; + +#[cfg(all(feature = "native", feature = "postgresql"))] +pub use database::PostgreSql; + +#[cfg(all(feature = "native", feature = "sqlite"))] +pub use database::Sqlite; type Result = std::result::Result; diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index c215f37b6fd5..6942e0ed36f5 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -30,12 +30,12 @@ "@prisma/adapter-planetscale": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", "@prisma/bundled-js-drivers": "workspace:*", - "mitata": "0.1.11", - "undici": "6.7.0", - "ws": "8.16.0" + "mitata": "^0.1.6", + "undici": "6.6.2", + "ws": "8.14.2" }, "devDependencies": { - "@types/node": "20.11.24", + "@types/node": "20.10.8", "tsup": "8.0.2", "typescript": "5.3.3" } diff --git a/query-engine/driver-adapters/executor/src/rn.ts b/query-engine/driver-adapters/executor/src/rn.ts new file mode 100644 index 000000000000..fd00e517bdbe --- /dev/null +++ b/query-engine/driver-adapters/executor/src/rn.ts @@ -0,0 +1,79 @@ +import Axios from "axios"; + +export 
function createRNEngineConnector(url: string, schema: string, logCallback: (msg: string) => void) { + const port = "3000"; + const baseIP = "192.168.0.14"; + const deviceUrl = `http://${baseIP}:${port}`; + const axios = Axios.create({ + baseURL: deviceUrl, + headers: { + "Content-Type": "application/json", + }, + transformResponse: (r) => r, + }); + + // axios.get("/ping").then(() => { + // console.error(`✅ Connection to RN device successful! URL: ${deviceUrl}`); + // }).catch(() => { + // throw new Error(`Could not ping device! Check server is runing on IP: ${deviceUrl}`) + // }) + + return { + connect: async () => { + const res = await axios.post(`/connect`, { + schema, + }); + return res.data; + }, + query: async ( + body: string, + trace: string, + txId: string + ): Promise => { + const res = await axios.post("/query", { + body, + trace, + txId, + }); + + const response = JSON.parse(res.data) + + if(response.logs.length) { + response.logs.forEach(logCallback) + } + + return response.engineResponse; + }, + startTransaction: async (body: string, trace: string): Promise => { + const res = await axios.post("/start_transaction", { + body, + trace, + }); + // console.error("start transaction data", res.data); + return res.data; + }, + commitTransaction: async (txId: string, trace: string): Promise => { + const res = await axios.post("/commit_transaction", { + txId, + trace, + }); + // console.error(`🐲 ${res.data}`); + return res.data; + }, + rollbackTransaction: async ( + txId: string, + trace: string + ): Promise => { + const res = await axios.post("/rollback_transaction", { + txId, + trace, + }); + return res.data; + }, + disconnect: async (trace: string) => { + await axios.post("/disconnect", { + trace, + }); + }, + }; +} diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index 4345887fe659..cc9a3de3cf4c 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -20,6 +20,7 @@ import { PrismaPlanetScale } from '@prisma/adapter-planetscale' import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; import { webcrypto } from 'node:crypto'; +import { createRNEngineConnector } from './rn' if (!global.crypto) { global.crypto = webcrypto as Crypto @@ -112,7 +113,8 @@ async function handleRequest(method: string, params: unknown): Promise txId?: string } - debug("Got `query`", params) + debug("🔷 Test query params") + debug('\x1b[36m', JSON.stringify(params, null, 2), '\x1b[0m'); const castParams = params as QueryPayload; const engine = state[castParams.schemaId].engine const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) @@ -130,7 +132,7 @@ async function handleRequest(method: string, params: unknown): Promise } } - debug("got response from engine: ", result) + debug("🟢 Engine response: ", result) // returning unparsed string: otherwise, some information gots lost during this round-trip. // In particular, floating point without decimal part turn into integers return result @@ -217,11 +219,16 @@ function respondOk(requestId: number, payload: unknown) { } async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[qe.QueryEngine, ErrorCapturingDriverAdapter]> { - const engineType = process.env.EXTERNAL_TEST_EXECUTOR === "Wasm" ? 
"Wasm" : "Napi"; - const adapter = await adapterFromEnv(url) as DriverAdapter - const errorCapturingAdapter = bindAdapter(adapter) - const engineInstance = await qe.initQueryEngine(engineType, errorCapturingAdapter, prismaSchema, logCallback, debug) - return [engineInstance, errorCapturingAdapter]; + if(process.env.EXTERNAL_TEST_EXECUTOR === "Mobile") { + const engineInstance = createRNEngineConnector(url, prismaSchema, logCallback) + return [engineInstance, {} as any]; + } else { + const engineType = process.env.EXTERNAL_TEST_EXECUTOR === "Wasm" ? "Wasm" : "Napi"; + const adapter = await adapterFromEnv(url) as DriverAdapter + const errorCapturingAdapter = bindAdapter(adapter) + const engineInstance = await qe.initQueryEngine(engineType, errorCapturingAdapter, prismaSchema, logCallback, debug) + return [engineInstance, errorCapturingAdapter]; + } } async function adapterFromEnv(url: string): Promise { diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index 6682ebf08ac6..d6203e262d3d 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -16,10 +16,10 @@ "keywords": [], "author": "", "devDependencies": { - "@types/node": "20.11.24", - "esbuild": "0.20.1", + "@types/node": "20.10.8", + "esbuild": "0.19.12", "tsup": "8.0.2", - "tsx": "4.7.1", + "tsx": "^4.7.0", "typescript": "5.3.3" } } diff --git a/query-engine/query-engine-c-abi/.gitignore b/query-engine/query-engine-c-abi/.gitignore new file mode 100644 index 000000000000..c41332e611c8 --- /dev/null +++ b/query-engine/query-engine-c-abi/.gitignore @@ -0,0 +1,10 @@ +QueryEngine.xcframework +simulator_fat +# Artifacts of the C ABI engine +*.tar.gz +openssl-1.1.1w +openssl-3.2.0 +openssl-3.1.4 +rust-openssl +libs +include \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/Cargo.toml b/query-engine/query-engine-c-abi/Cargo.toml new file mode 100644 index 000000000000..6b58e43175aa --- /dev/null +++ b/query-engine/query-engine-c-abi/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "query-engine-c-abi" +version = "0.1.0" +edition = "2021" + +[lib] +doc = false +crate-type = ["staticlib"] +name = "query_engine" + +[dependencies] +anyhow = "1" +async-trait = "0.1" +query-core = { path = "../core" } +request-handlers = { path = "../request-handlers", features = [ + "sqlite", + "native", +] } +query-connector = { path = "../connectors/query-connector" } +query-engine-common = { path = "../../libs/query-engine-common" } +user-facing-errors = { path = "../../libs/user-facing-errors" } +psl = { workspace = true, features = ["sqlite"] } +sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } +query-structure = { path = "../query-structure" } +chrono.workspace = true +quaint = { path = "../../quaint", default-features = false, features = [ + "sqlite", +] } +rusqlite = "0.29" +uuid.workspace = true +thiserror = "1" +connection-string.workspace = true +url = "2" +serde_json.workspace = true +serde.workspace = true +indoc.workspace = true + +tracing = "0.1" +tracing-subscriber = { version = "0.3" } +tracing-futures = "0.2" +tracing-opentelemetry = "0.17.3" +opentelemetry = { version = "0.17" } + +tokio.workspace = true +futures = "0.3" +once_cell = "1.19.0" + +[build-dependencies] +cbindgen = "0.24.0" diff --git a/query-engine/query-engine-c-abi/Makefile b/query-engine/query-engine-c-abi/Makefile new file mode 100644 index 000000000000..1688ace84c08 --- /dev/null +++ 
b/query-engine/query-engine-c-abi/Makefile @@ -0,0 +1,59 @@ +# rustup target add x86_64-apple-ios # intel simulator +# rustup target add aarch64-apple-ios # actual iOS +# rustup target add aarch64-apple-ios-sim # arm simulator + +# rustup target add aarch64-linux-android # Android arm 64 bits +# rustup target add x86_64-linux-android # Intel 64 bits emulator +# rustup target add armv7-linux-androideabi # Android arm 32 bits +# rustup target add i686-linux-android # Intel 32 bits emulator + +ARCH_IOS_SIM = aarch64-apple-ios-sim +ARCHS_IOS = x86_64-apple-ios aarch64-apple-ios aarch64-apple-ios-sim +# ARCHS_IOS = aarch64-apple-ios aarch64-apple-ios-sim +ARCHS_ANDROID = aarch64-linux-android armv7-linux-androideabi x86_64-linux-android i686-linux-android +LIB = libquery_engine.a +XCFRAMEWORK = QueryEngine.xcframework + +.PHONY: clean ios android $(ARCH_IOS_SIM) $(ARCHS_IOS) $(ARCHS_ANDROID) sim copy-ios nuke + +nuke: + rm -rf ../../target + +clean: + rm -rf QueryEngine.xcframework + rm -rf simulator_fat + mkdir simulator_fat + # rm -rf include + # mkdir include + +all: nuke ios android + +################# ANDROID ################# +android: clean $(ARCHS_ANDROID) + ./copy-android.sh + +$(ARCHS_ANDROID): %: + ./build-android-target.sh $@ + +################# iOS ################# +ios: clean $(XCFRAMEWORK) + +sim: clean + cargo build --target $(ARCH_IOS_SIM) + xcodebuild -create-xcframework -library ../../target/$(ARCH_IOS_SIM)/debug/libquery_engine.a -headers include -output $(XCFRAMEWORK) + ./copy-ios.sh + +sim-release: clean + cargo build --target $(ARCH_IOS_SIM) --release + xcodebuild -create-xcframework -library ../../target/$(ARCH_IOS_SIM)/release/libquery_engine.a -headers include -output $(XCFRAMEWORK) + ./copy-ios.sh + +$(ARCHS_IOS): %: + cargo build --release --target $@ + +$(XCFRAMEWORK): $(ARCHS_IOS) +# Library is too large to be published with all the targets on NPM. Therefore we are not shipping the intel version (for older intel macs) + lipo -create $(wildcard ../../target/x86_64-apple-ios/release/$(LIB)) $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -output simulator_fat/libquery_engine.a + xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library simulator_fat/libquery_engine.a -headers include -output $@ + # xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -headers include -output $@ + ./copy-ios.sh \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/README.md b/query-engine/query-engine-c-abi/README.md new file mode 100644 index 000000000000..20d57d8ceae2 --- /dev/null +++ b/query-engine/query-engine-c-abi/README.md @@ -0,0 +1,51 @@ +# Query Engine C (compatible) ABI + +This version of the query engine exposes the Rust engine via C callable functions. There are subtle differences in this implementation compared to the Node and Wasm versions. Although it is usable by any language that can operate with the C ABI, it is oriented towards having Prisma run on React Native, so the build scripts are geared to that goal. + +## Setup + +You need to have Xcode, Java, the Android NDK (you can/should install it via Android Studio) and CMake installed on your machine to compile the engine. The Makefile contains the main entry points for building the different architectures and platforms. You also need to install the target Rust architectures.
You can find the exact [process described here](https://ospfranco.com/post/2023/08/11/react-native,-rust-step-by-step-integration-guide/). + +- `make ios` → Builds the iOS libraries in release mode +- `make sim` → Builds the simulator arch only for rapid development +- `make android` → Builds all the Android archs + +Once the libraries have been built, there are a couple of extra scripts (`copy-ios.sh` and `copy-android.sh`) that move the results of the compilation into a sibling of the parent folder (`react-native-prisma`), which is where they will be packaged and published to npm. + +The results of the compilation are static libraries (`.a`) as well as a generated C header file. + +The C header file (`include/query_engine.h`) is automatically generated during the compilation process via `cbindgen`. There is no need to modify this file manually; it is regenerated and packaged each time you compile the library. You need to mark the functions inside `engine.rs` as `extern "C"` so that the generator picks them up. + +### iOS + +iOS requires the use of `.xcframework` to package similar architectures (device iOS and the arm64 iOS simulator introduced with M1 machines) without conflicts. + +## Base Path + +This query engine takes one additional parameter in the create function (the entry point of all operations): the `base_path` string param. This param allows the query engine to change its working directory to the passed path. This is required on iOS (and on the latest versions of Android) because the file system is sandboxed. The react-native client library that consumes this version of the engine passes the Library directory on iOS and the Databases folder on Android; both of these folders are within the sandbox and can be freely read and written. The implemented solution simply changes the working directory of the Rust code so that the query engine can operate as if it were running on a non-sandboxed platform, which lets it run without changes to implementation details or even hackier workarounds. It might have unintended consequences on the behavior of the engine though, so if you have any issues please report them. + +## Migrations + +This query engine version also contains parts of the schema engine. Previous versions of Prisma were meant to be run on the server by the developer to test migrations or execute them against a single server database. Now that we are targeting front-end platforms, it is necessary to be able to perform migrations ON-DEVICE and at RUNTIME. + +In order to enable this, there are some new functions exposed through the query engine API that call into the schema engine. + +- `prisma_apply_pending_migrations` → Given a path, it scans all the folders in alphabetical order, looks inside each one for a `migration.sql` and executes it. It's equivalent (it literally calls the same internal function) to `prisma migrate dev` + +- `prisma_push_schema` → Will try to apply the passed schema to the database in an unsafe manner. Some data might be lost. It's equivalent to `prisma db push` + +## Usage + +Like any C API, returning multiple chunks of data is done by passing pointers (as in SQLite, for example). In particular, instantiating the query engine returns an opaque pointer allocated on the heap. You need to pass this pointer to each subsequent call to the interfaces to use the query engine functionality.
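+
+Below is a minimal sketch of how calling code might drive the engine through the generated `query_engine.h` header. It is illustrative only: the typedef names are assumed to match the Rust structs (`QueryEngine`, `ConstructorOptions`, `ConstructorOptionsNative`), and the schema, datasource overrides, env JSON, trace payloads and query body are placeholders, not the exact values the react-native client sends.
+
+```c
+#include <stdbool.h>
+#include <stdio.h>
+#include "query_engine.h"
+
+// Called by the engine with the engine id and a log message.
+static void on_log(const char *engine_id, const char *message) {
+  printf("[%s] %s\n", engine_id, message);
+}
+
+int main(void) {
+  QueryEngine *engine = NULL;
+  char *error = NULL;
+
+  ConstructorOptions options = {
+      .id = "example-engine",
+      .datamodel = "<contents of schema.prisma>", // placeholder
+      .base_path = NULL, // sandboxed app folder on iOS/Android
+      .log_level = "info",
+      .log_queries = false,
+      .datasource_overrides = "{}",
+      .env = "{}",
+      .ignore_env_var_errors = true,
+      .native = {.config_dir = "."},
+      .log_callback = on_log,
+  };
+
+  if (prisma_create(options, &engine) != PRISMA_OK) return 1;
+  if (prisma_connect(engine, "{}", &error) != PRISMA_OK) return 1;
+
+  // JSON-protocol request body (placeholder); pass NULL for the tx id when not in a transaction.
+  const char *response = prisma_query(engine, "{...}", "{}", NULL, &error);
+  printf("%s\n", response != NULL ? response : error);
+  return 0;
+}
+```
+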
Each operation returns an integer status code: PRISMA_OK (0) if the operation finished correctly, or a different error code for each possible error. + +C calls are not compatible with tokio/async, so the C functions need to use `block_on` in order to stay synchronous. If async functionality is wanted, the calling language/environment should spin up its own threads and call the functions there. + +While `block_on` might not be the most efficient way to achieve things, it keeps changes to the core query_engine functionality to a minimum. + +## OpenSSL Snafu + +The query engine (to be exact, the different database connectors) depends on OpenSSL; however, the Rust crate tries to compile the latest version, which [currently has a problem with Android armv7 architectures](https://github.com/openssl/openssl/pull/22181). In order to get around this, we have to download OpenSSL, patch it, and compile and link it manually. The download, patch and compile steps are scripted via the `build-openssl.sh` script. You need to have the Android NDK installed and the `ANDROID_NDK_ROOT` variable set in your environment before running this script. You can find more info in the script itself. The libraries will be output in the `libs` folder with the specific structure the Rust compilation needs to finish linking OpenSSL in the main query engine compilation. The crate `openssl` then uses the compiled version by detecting the `OPENSSL_DIR` flag, which is set in the `build-android-target.sh` script. + +Once the upstream fix is merged we can get rid of this custom compilation step. \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/build-android-target.sh b/query-engine/query-engine-c-abi/build-android-target.sh new file mode 100755 index 000000000000..16104cfef11c --- /dev/null +++ b/query-engine/query-engine-c-abi/build-android-target.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +TARGET="$1" + +if [ "$TARGET" = "" ]; then + echo "missing argument TARGET" + echo "Usage: $0 TARGET" + exit 1 +fi + +NDK_TARGET=$TARGET + +if [ "$TARGET" = "armv7-linux-androideabi" ]; then + NDK_TARGET="armv7a-linux-androideabi" +fi + +OPENSSL_ARCH="android-arm64" +# if [ "$TARGET" = "aarch64-linux-android" ]; then +# fi + +if [ "$TARGET" = "x86_64-linux-android" ]; then + OPENSSL_ARCH="android-x86_64" +fi + +if [ "$TARGET" = "armv7-linux-androideabi" ]; then + OPENSSL_ARCH="android-arm" +fi + +if [ "$TARGET" = "i686-linux-android" ]; then + OPENSSL_ARCH="android-x86" +fi + + +API_VERSION="21" +NDK_VERSION="26.0.10792818" +NDK_HOST="darwin-x86_64" + +if [ -z "$ANDROID_SDK_ROOT" ]; then + echo "ANDROID SDK IS MISSING 🟥" + exit 1 +fi + +if [ -z "$NDK" ]; then + NDK="$ANDROID_SDK_ROOT/ndk/$NDK_VERSION" +fi + +TOOLS="$NDK/toolchains/llvm/prebuilt/$NDK_HOST" + +CWD=$(pwd) + +export OPENSSL_DIR=$CWD/libs/$OPENSSL_ARCH +export OPENSSL_STATIC=1 + +# OPENSSL_DIR=./libs/android/clang/${OPENSSL_ARCH} \ +AR=$TOOLS/bin/llvm-ar \ +CC=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang \ +CXX=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang++ \ +RANLIB=$TOOLS/bin/llvm-ranlib \ +CXXFLAGS="--target=$NDK_TARGET" \ +cargo build --release --target $TARGET $EXTRA_ARGS \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/build-openssl.sh b/query-engine/query-engine-c-abi/build-openssl.sh new file mode 100755 index 000000000000..88621d06639c --- /dev/null +++ b/query-engine/query-engine-c-abi/build-openssl.sh @@ -0,0 +1,78 @@ +#!/bin/bash + +#set -v +set -ex + +export OPENSSL_VERSION="openssl-3.1.4" +rm -rf 
${OPENSSL_VERSION} +# check if the tar is already downloaded and if not download and extract it +if [ ! -d ${OPENSSL_VERSION}.tar.gz ]; then + curl -O "https://www.openssl.org/source/${OPENSSL_VERSION}.tar.gz" + tar xfz "${OPENSSL_VERSION}.tar.gz" +fi + +PROJECT_HOME=`pwd` +PATH_ORG=$PATH +OUTPUT_DIR="libs" + +# Clean output: +rm -rf $OUTPUT_DIR +mkdir $OUTPUT_DIR + +build_android_clang() { + + echo "" + echo "----- Build libcrypto & libssl.so for "$1" -----" + echo "" + + ARCHITECTURE=$1 + TOOLCHAIN=$2 + stl="libc++" + + # Set toolchain + export TOOLCHAIN_ROOT=$ANDROID_NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64 + export SYSROOT=$TOOLCHAIN_ROOT/sysroot + export CC=${TOOLCHAIN}21-clang + export CXX=${TOOLCHAIN}21-clang++ + export CXXFLAGS="-fPIC" + export CPPFLAGS="-DANDROID -fPIC" + + export PATH=$TOOLCHAIN_ROOT/bin:$SYSROOT/usr/local/bin:$PATH + + cd "${OPENSSL_VERSION}" + + ./Configure $ARCHITECTURE no-asm no-shared -D__ANDROID_API__=21 + + make clean + # Apply patch that fixes the armcap instruction + # Linux version + # sed -e '/[.]hidden.*OPENSSL_armcap_P/d; /[.]extern.*OPENSSL_armcap_P/ {p; s/extern/hidden/ }' -i -- crypto/*arm*pl crypto/*/asm/*arm*pl + # macOS version + sed -E -i '' -e '/[.]hidden.*OPENSSL_armcap_P/d' -e '/[.]extern.*OPENSSL_armcap_P/ {p; s/extern/hidden/; }' crypto/*arm*pl crypto/*/asm/*arm*pl + + make + + mkdir -p ../$OUTPUT_DIR/${ARCHITECTURE}/lib + mkdir -p ../$OUTPUT_DIR/${ARCHITECTURE}/include + + # file libcrypto.so + # file libssl.so + + cp libcrypto.a ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libcrypto.a + cp libssl.a ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libssl.a + # cp libcrypto.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libcrypto.so + # cp libssl.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libssl.so + + cp -R include/openssl ../$OUTPUT_DIR/${ARCHITECTURE}/include + + cd .. +} + +build_android_clang "android-arm" "armv7a-linux-androideabi" +build_android_clang "android-x86" "i686-linux-android" +build_android_clang "android-x86_64" "x86_64-linux-android" +build_android_clang "android-arm64" "aarch64-linux-android" + +export PATH=$PATH_ORG + +# pingme "OpenSSL finished compiling" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/build.rs b/query-engine/query-engine-c-abi/build.rs new file mode 100644 index 000000000000..0739d31bf255 --- /dev/null +++ b/query-engine/query-engine-c-abi/build.rs @@ -0,0 +1,33 @@ +extern crate cbindgen; + +use std::env; +use std::process::Command; + +fn store_git_commit_hash() { + let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); + let git_hash = String::from_utf8(output.stdout).unwrap(); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); +} + +fn generate_c_headers() { + let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + + cbindgen::Builder::new() + .with_crate(crate_dir) + .with_language(cbindgen::Language::C) + .with_include_guard("query_engine_h") + .with_autogen_warning("/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */") + .with_namespace("prisma") + .with_cpp_compat(true) + .generate() + .expect("Unable to generate bindings") + .write_to_file("include/query_engine.h"); +} + +fn main() { + // Tell Cargo that if the given file changes, to rerun this build script. 
+ println!("cargo:rerun-if-changed=src/engine.rs"); + // println!("✅ Running build.rs"); + store_git_commit_hash(); + generate_c_headers(); +} diff --git a/query-engine/query-engine-c-abi/cargo-config.toml b/query-engine/query-engine-c-abi/cargo-config.toml new file mode 100644 index 000000000000..68151bfbd7b6 --- /dev/null +++ b/query-engine/query-engine-c-abi/cargo-config.toml @@ -0,0 +1,14 @@ +# template file +# move this to your home directory to allow rust to compile the library for android +# All paths are relative to the user home folder +[target.aarch64-linux-android] +linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" + +[target.armv7-linux-androideabi] +linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/armv7a-linux-androideabi21-clang" + +[target.i686-linux-android] +linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/i686-linux-android21-clang" + +[target.x86_64-linux-android] +linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/x86_64-linux-android21-clang" diff --git a/query-engine/query-engine-c-abi/copy-android.sh b/query-engine/query-engine-c-abi/copy-android.sh new file mode 100755 index 000000000000..0711b0ba5fcb --- /dev/null +++ b/query-engine/query-engine-c-abi/copy-android.sh @@ -0,0 +1,18 @@ +#! /bin/bash + +TARGET_DIR=../../../react-native-prisma + +mkdir -p $TARGET_DIR/android/jniLibs +mkdir -p $TARGET_DIR/android/jniLibs/x86 +mkdir -p $TARGET_DIR/android/jniLibs/x86_64 +mkdir -p $TARGET_DIR/android/jniLibs/arm64-v8a +mkdir -p $TARGET_DIR/android/jniLibs/armeabi-v7a + +cp ../../target/i686-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/x86/libquery_engine.a +cp ../../target/aarch64-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/arm64-v8a/libquery_engine.a +cp ../../target/armv7-linux-androideabi/release/libquery_engine.a $TARGET_DIR/android/jniLibs/armeabi-v7a/libquery_engine.a +cp ../../target/x86_64-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/x86_64/libquery_engine.a + +cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h + +pingme "✅ Android compilation ready" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/copy-ios.sh b/query-engine/query-engine-c-abi/copy-ios.sh new file mode 100755 index 000000000000..c8eb73e89126 --- /dev/null +++ b/query-engine/query-engine-c-abi/copy-ios.sh @@ -0,0 +1,14 @@ +#!/bin/sh + +set -ex + +TARGET_DIR=../../../react-native-prisma + +# This one is not actually necessary but XCode picks it up and mixes up versions +cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h + +rm -rf "$TARGET_DIR/QueryEngine.xcframework" + +cp -R QueryEngine.xcframework "$TARGET_DIR/QueryEngine.xcframework" + +pingme "✅ Prisma iOS Finished" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs new file mode 100644 index 000000000000..95a72312b868 --- /dev/null +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -0,0 +1,653 @@ +use crate::{ + error::ApiError, + logger::Logger, + migrations::{ + execute_migration_script, list_migration_dir, list_migrations, record_migration_started, MigrationDirectory, + }, +}; +use once_cell::sync::Lazy; +use query_core::{ + protocol::EngineProtocol, + schema::{self}, + telemetry, TransactionOptions, TxId, +}; +use request_handlers::{load_executor, 
RequestBody, RequestHandler}; +use serde_json::json; +use std::{ + env, + ffi::{c_char, c_int, CStr, CString}, + path::{Path, PathBuf}, + ptr::null_mut, + sync::Arc, +}; +use tokio::{ + runtime::{self, Runtime}, + sync::RwLock, +}; +use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument, Span}; +use user_facing_errors::Error; + +use query_engine_common::engine::{ + stringify_env_values, ConnectedEngine, ConnectedEngineNative, EngineBuilder, EngineBuilderNative, Inner, +}; +use request_handlers::ConnectorKind; +// use tracing_subscriber::filter::LevelFilter; + +// The query engine code is async by nature, however the C API cannot call async functions directly. +// This runtime is here to allow the C API to block_on it and return the responses in a sync manner +static RUNTIME: Lazy<Runtime> = Lazy::new(|| runtime::Builder::new_multi_thread().enable_all().build().unwrap()); + +// C-like return codes +#[no_mangle] +pub static PRISMA_OK: i32 = 0; +#[no_mangle] +pub static PRISMA_UNKNOWN_ERROR: i32 = 1; +#[no_mangle] +pub static PRISMA_MISSING_POINTER: i32 = 2; + +/// This struct holds an instance of the Prisma query engine. +/// You can instantiate as many as you want. +pub struct QueryEngine { + // The error pointer passed by the calling code will point to this String, giving the caller a bit more information when something goes wrong + error_string: RwLock<CString>, + inner: RwLock<Inner>, + base_path: Option<String>, + logger: Logger, +} + +#[repr(C)] +pub struct ConstructorOptionsNative { + pub config_dir: *const c_char, +} + +/// Parameters defining the construction of an engine. +/// Unlike the Node version, this doesn't support the GraphQL protocol for talking with the prisma/client, since it is +/// deprecated and going forward everything should be done via JSON RPC. +#[repr(C)] +pub struct ConstructorOptions { + id: *const c_char, + datamodel: *const c_char, + // Used on iOS/Android to navigate to the sandboxed app folder to execute all file operations + base_path: *const c_char, + log_level: *const c_char, + log_queries: bool, + datasource_overrides: *const c_char, + env: *const c_char, + ignore_env_var_errors: bool, + native: ConstructorOptionsNative, + log_callback: unsafe extern "C" fn(*const c_char, *const c_char), +} + +fn get_cstr_safe(ptr: *const c_char) -> Option<String> { + if ptr.is_null() { + None + } else { + let cstr = unsafe { CStr::from_ptr(ptr) }; + Some(String::from_utf8_lossy(cstr.to_bytes()).to_string()) + } +} + +fn map_known_error(err: query_core::CoreError) -> crate::Result<String> { + let user_error: user_facing_errors::Error = err.into(); + let value = serde_json::to_string(&user_error)?; + + Ok(value) +} + +// Struct that holds an internal prisma engine. +// The inner prop holds the internal state: it starts as a Builder, +// meaning it is not connected to the database; +// a call to connect is necessary to start executing queries +impl QueryEngine { + /// Parse a valid datamodel and configuration to allow connecting later on.
+ pub fn new(constructor_options: ConstructorOptions) -> Result { + // Create a logs closure that can be passed around and called at any time + // safe scheduling should be taken care by the code/language/environment calling this C-compatible API + let engine_id = get_cstr_safe(constructor_options.id).expect("engine id cannot be missing"); + let log_callback_c = constructor_options.log_callback; + let log_callback = move |msg: String| { + let id = CString::new(engine_id.clone()).unwrap(); + let c_message = CString::new(msg).unwrap(); + unsafe { + log_callback_c(id.as_ptr(), c_message.as_ptr()); + } + }; + + let str_env = get_cstr_safe(constructor_options.env).expect("Environment missing"); + let json_env = serde_json::from_str(str_env.as_str()).expect("Environment cannot be parsed"); + let env = stringify_env_values(json_env)?; // we cannot trust anything JS sends us from process.env + + let str_datasource_overrides = + get_cstr_safe(constructor_options.datasource_overrides).expect("Datesource overrides missing"); + let json_datasource_overrides = + serde_json::from_str(str_datasource_overrides.as_str()).expect("Datasource overrides cannot be parsed"); + let overrides: Vec<(_, _)> = stringify_env_values(json_datasource_overrides) + .unwrap() + .into_iter() + .collect(); + + let datamodel = get_cstr_safe(constructor_options.datamodel).expect("Datamodel must be present"); + let mut schema = psl::validate(datamodel.into()); + let config = &mut schema.configuration; + + // let preview_features = config.preview_features(); + + schema + .diagnostics + .to_result() + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + let base_path = get_cstr_safe(constructor_options.base_path); + match &base_path { + Some(path) => env::set_current_dir(Path::new(&path)).expect("Could not change directory"), + _ => println!("No base path passed!"), + } + + config + .resolve_datasource_urls_query_engine( + &overrides, + |key| env.get(key).map(ToString::to_string), + // constructor_options.ignore_env_var_errors, + true, + ) + .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + + config + .validate_that_one_datasource_is_provided() + .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + + // let enable_tracing = config.preview_features().contains(PreviewFeature::Tracing); + // let engine_protocol = constructor_options.engine_protocol.unwrap_or(EngineProtocol::Json); + let engine_protocol = EngineProtocol::Json; + + let config_dir_string = get_cstr_safe(constructor_options.native.config_dir).expect("Config dir is expected"); + let config_dir = PathBuf::from(config_dir_string); + + let builder = EngineBuilder { + schema: Arc::new(schema), + engine_protocol, + native: EngineBuilderNative { config_dir, env }, + }; + + let log_level_string = get_cstr_safe(constructor_options.log_level).unwrap(); + let log_level = log_level_string.parse::().unwrap(); + let logger = Logger::new( + constructor_options.log_queries, + log_level, + Box::new(log_callback), + false, + ); + + Ok(Self { + error_string: RwLock::new(CString::new("").unwrap()), + inner: RwLock::new(Inner::Builder(builder)), + base_path, + logger, + }) + } + + pub async fn connect(&self, trace: *const c_char) -> Result<(), Error> { + if let Some(base_path) = self.base_path.as_ref() { + env::set_current_dir(Path::new(&base_path)).expect("Could not change directory"); + } + + let trace_string = get_cstr_safe(trace).expect("Connect trace is missing"); + + let span = tracing::info_span!("prisma:engine:connect"); + let _ = 
telemetry::helpers::set_parent_context_from_json_str(&span, &trace_string); + + let mut inner = self.inner.write().await; + let builder = inner.as_builder()?; + let arced_schema = Arc::clone(&builder.schema); + let arced_schema_2 = Arc::clone(&builder.schema); + + let engine = async move { + // We only support one data source & generator at the moment, so take the first one (default not exposed yet). + let data_source = arced_schema + .configuration + .datasources + .first() + .ok_or_else(|| ApiError::configuration("No valid data source found"))?; + + let preview_features = arced_schema.configuration.preview_features(); + + let executor_fut = async { + let url = data_source + .load_url_with_config_dir(&builder.native.config_dir, |key| { + builder.native.env.get(key).map(ToString::to_string) + }) + .map_err(|err| crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()))?; + // This version of the query engine supports connecting via Rust bindings directly + // support for JS drivers can be added, but I commented it out for now + let connector_kind = ConnectorKind::Rust { + url, + datasource: data_source, + }; + + let executor = load_executor(connector_kind, preview_features).await?; + let connector = executor.primary_connector(); + + let conn_span = tracing::info_span!( + "prisma:engine:connection", + user_facing = true, + "db.type" = connector.name(), + ); + + connector.get_connection().instrument(conn_span).await?; + + crate::Result::<_>::Ok(executor) + }; + + let query_schema_span = tracing::info_span!("prisma:engine:schema"); + let query_schema_fut = tokio::runtime::Handle::current() + .spawn_blocking(move || { + let enable_raw_queries = true; + schema::build(arced_schema_2, enable_raw_queries) + }) + .instrument(query_schema_span); + + let (query_schema, executor) = tokio::join!(query_schema_fut, executor_fut); + + Ok(ConnectedEngine { + schema: builder.schema.clone(), + query_schema: Arc::new(query_schema.unwrap()), + executor: executor?, + engine_protocol: builder.engine_protocol, + native: ConnectedEngineNative { + config_dir: builder.native.config_dir.clone(), + env: builder.native.env.clone(), + #[cfg(all(not(target_os = "ios"), not(target_os = "android")))] + metrics: None, + }, + }) as crate::Result + } + .instrument(span) + .await?; + + *inner = Inner::Connected(engine); + Ok(()) + } + + pub async fn query( + &self, + body_str: *const c_char, + trace_str: *const c_char, + tx_id_str: *const c_char, + ) -> Result { + let dispatcher = self.logger.dispatcher(); + + async move { + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + let body = get_cstr_safe(body_str).expect("Prisma engine execute body is missing"); + let tx_id = get_cstr_safe(tx_id_str); + let trace = get_cstr_safe(trace_str).expect("Trace is needed"); + + let query = RequestBody::try_from_str(&body, engine.engine_protocol()).map_err(|err| Error::from(err))?; + + let span = if tx_id.is_none() { + tracing::info_span!("prisma:engine", user_facing = true) + } else { + Span::none() + }; + + let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + async move { + let handler = RequestHandler::new(engine.executor(), engine.query_schema(), engine.engine_protocol()); + let response = handler.handle(query, tx_id.map(TxId::from), trace_id).await; + + let serde_span = tracing::info_span!("prisma:engine:response_json_serialization", user_facing = true); + Ok(serde_span.in_scope(|| serde_json::to_string(&response))?) 
+ } + .instrument(span) + .await + } + .with_subscriber(dispatcher) + .await + } + + /// Disconnect and drop the core. Can be reconnected later with `#connect`. + pub async fn disconnect(&self, trace_str: *const c_char) -> Result<(), Error> { + let trace = get_cstr_safe(trace_str).expect("Trace is needed"); + // let dispatcher = self.logger.dispatcher(); + + // async_panic_to_js_error(async { + let span = tracing::info_span!("prisma:engine:disconnect"); + let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + // TODO: when using Node Drivers, we need to call Driver::close() here. + + // async { + let mut inner = self.inner.write().await; + let engine = inner.as_engine()?; + + let builder = EngineBuilder { + schema: engine.schema.clone(), + engine_protocol: engine.engine_protocol(), + native: EngineBuilderNative { + config_dir: engine.native.config_dir.clone(), + env: engine.native.env.clone(), + }, + }; + + *inner = Inner::Builder(builder); + + Ok(()) + // } + // .instrument(span) + // .await + // }) + // .with_subscriber(dispatcher) + // .await + } + + async unsafe fn apply_migrations(&self, migration_folder_path: *const c_char) -> Result<(), Error> { + if let Some(base_path) = self.base_path.as_ref() { + env::set_current_dir(Path::new(&base_path)).expect("Could not change directory"); + } + let migration_folder_path_str = get_cstr_safe(migration_folder_path).unwrap(); + let migration_folder_path = Path::new(&migration_folder_path_str); + let migrations_from_filesystem = list_migration_dir(migration_folder_path)?; + + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + let url = engine + .schema + .configuration + .datasources + .first() + .unwrap() + .load_url(|key| env::var(key).ok()) + .unwrap(); + + let url_without_prefix = url.strip_prefix("file:").unwrap_or(&url); + let database_path = Path::new(url_without_prefix); + + let migrations_from_database = list_migrations(database_path).unwrap(); + + let unapplied_migrations: Vec<&MigrationDirectory> = migrations_from_filesystem + .iter() + .filter(|fs_migration| { + !migrations_from_database + .iter() + .filter(|db_migration: &&crate::migrations::MigrationRecord| db_migration.finished_at.is_some()) + .any(|db_migration| fs_migration.migration_name() == db_migration.migration_name) + }) + .collect(); + + // TODO enable this later + // detect_failed_migrations(&migrations_from_database)?; + + let mut applied_migration_names: Vec = Vec::with_capacity(unapplied_migrations.len()); + + for unapplied_migration in unapplied_migrations { + let script = unapplied_migration.read_migration_script()?; + + record_migration_started(database_path, unapplied_migration.migration_name())?; + + execute_migration_script(database_path, unapplied_migration.migration_name(), &script)?; + + applied_migration_names.push(unapplied_migration.migration_name().to_owned()); + } + + //output applied migrations + dbg!(applied_migration_names); + + Ok(()) + } + + /// If connected, attempts to start a transaction in the core and returns its ID. 
+ pub async fn start_transaction(&self, input_str: *const c_char, trace_str: *const c_char) -> Result { + let input = get_cstr_safe(input_str).expect("Input string missing"); + let trace = get_cstr_safe(trace_str).expect("trace is required in transactions"); + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + // TODO(osp) check with team about this dispatcher + // let dispatcher = self.logger.dispatcher(); + + // async move { + let span = tracing::info_span!("prisma:engine:itx_runner", user_facing = true, itx_id = field::Empty); + telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + let tx_opts: TransactionOptions = serde_json::from_str(&input)?; + match engine + .executor() + .start_tx(engine.query_schema().clone(), engine.engine_protocol(), tx_opts) + .instrument(span) + .await + { + Ok(tx_id) => Ok(json!({ "id": tx_id.to_string() }).to_string()), + Err(err) => Ok(map_known_error(err)?), + } + // } + // .with_subscriber(dispatcher) + // .await + } + + // If connected, attempts to commit a transaction with id `tx_id` in the core. + pub async fn commit_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { + let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing"); + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + // let dispatcher = self.logger.dispatcher(); + + // async move { + match engine.executor().commit_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } + // } + // .with_subscriber(dispatcher) + // .await + } + + // If connected, attempts to roll back a transaction with id `tx_id` in the core. + pub async fn rollback_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { + let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing"); + // let trace = get_cstr_safe(trace_str).expect("trace is required in transactions"); + let inner = self.inner.read().await; + let engine = inner.as_engine()?; + + // let dispatcher = self.logger.dispatcher(); + + // async move { + match engine.executor().rollback_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } + // } + // .with_subscriber(dispatcher) + // .await + } +} + +// _____ _____ +// /\ | __ \_ _| +// / \ | |__) || | +// / /\ \ | ___/ | | +// / ____ \| | _| |_ +// /_/ \_\_| |_____| +// +// This API is meant to be stateless. 
This means the box pointer to the query engine structure will be returned to the +// calling code and should be passed to subsequent calls +// +// Be should be careful about not de-allocating the pointer +// when adding a new function remember to always call mem::forget + +#[no_mangle] +pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut *mut QueryEngine) -> c_int { + if qe_ptr == std::ptr::null_mut() { + return PRISMA_MISSING_POINTER; + } + + let res = QueryEngine::new(options); + match res { + Ok(v) => { + *qe_ptr = Box::into_raw(Box::new(v)); + PRISMA_OK + } + Err(e) => { + println!("Error creating query engine {:#?}", e); + PRISMA_UNKNOWN_ERROR + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_connect( + qe: *mut QueryEngine, + trace: *const c_char, + error_string_ptr: *mut *mut c_char, +) -> c_int { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.connect(trace).await }); + + match result { + Ok(_engine) => { + std::mem::forget(query_engine); + *error_string_ptr = std::ptr::null_mut(); + PRISMA_OK + } + Err(err) => { + RUNTIME.block_on(async { + let mut error_string = query_engine.error_string.write().await; + *error_string = CString::new(err.message()).unwrap(); + *error_string_ptr = error_string.as_ptr() as *mut c_char; + }); + std::mem::forget(query_engine); + PRISMA_UNKNOWN_ERROR + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_query( + qe: *mut QueryEngine, + body_str: *const c_char, + header_str: *const c_char, + tx_id_str: *const c_char, + error_string_ptr: *mut *mut c_char, +) -> *const c_char { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.query(body_str, header_str, tx_id_str).await }); + match result { + Ok(query_result) => { + std::mem::forget(query_engine); + *error_string_ptr = std::ptr::null_mut(); + CString::new(query_result).unwrap().into_raw() + } + Err(err) => { + RUNTIME.block_on(async { + let mut error_string = query_engine.error_string.write().await; + *error_string = CString::new(err.message()).unwrap(); + *error_string_ptr = Box::into_raw(Box::new(error_string.as_ptr())) as *mut c_char; + }); + std::mem::forget(query_engine); + return null_mut(); + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_start_transaction( + qe: *mut QueryEngine, + options_str: *const c_char, + header_str: *const c_char, +) -> *const c_char { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.start_transaction(options_str, header_str).await }); + match result { + Ok(query_result) => { + std::mem::forget(query_engine); + CString::new(query_result).unwrap().into_raw() + } + Err(_err) => { + std::mem::forget(query_engine); + return null_mut(); + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_commit_transaction( + qe: *mut QueryEngine, + tx_id_str: *const c_char, + header_str: *const c_char, +) -> *const c_char { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.commit_transaction(tx_id_str, header_str).await }); + std::mem::forget(query_engine); + match result { + Ok(query_result) => CString::new(query_result).unwrap().into_raw(), + Err(_err) => { + return null_mut(); + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_rollback_transaction( + qe: *mut QueryEngine, + tx_id_str: *const c_char, + header_str: *const c_char, +) -> *const c_char { + let query_engine: Box = Box::from_raw(qe); + let result = 
RUNTIME.block_on(async { query_engine.rollback_transaction(tx_id_str, header_str).await }); + std::mem::forget(query_engine); + match result { + Ok(query_result) => CString::new(query_result).unwrap().into_raw(), + Err(_err) => { + return null_mut(); + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *const c_char) -> c_int { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.disconnect(header_str).await }); + std::mem::forget(query_engine); + match result { + Ok(_) => PRISMA_OK, + Err(_err) => { + return PRISMA_UNKNOWN_ERROR; + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_apply_pending_migrations( + qe: *mut QueryEngine, + migration_folder_path: *const c_char, + error_string_ptr: *mut *mut c_char, +) -> c_int { + let query_engine: Box = Box::from_raw(qe); + let result = RUNTIME.block_on(async { query_engine.apply_migrations(migration_folder_path).await }); + match result { + Ok(_) => { + std::mem::forget(query_engine); + *error_string_ptr = std::ptr::null_mut(); + PRISMA_OK + } + Err(err) => { + RUNTIME.block_on(async { + let mut error_string = query_engine.error_string.write().await; + *error_string = CString::new(err.message()).unwrap(); + *error_string_ptr = error_string.as_ptr() as *mut c_char; + }); + std::mem::forget(query_engine); + return PRISMA_UNKNOWN_ERROR; + } + } +} + +#[no_mangle] +pub unsafe extern "C" fn prisma_destroy(qe: *mut QueryEngine) -> c_int { + // Once the variable goes out of scope, it will be deallocated + let _query_engine: Box = Box::from_raw(qe); + PRISMA_OK +} diff --git a/query-engine/query-engine-c-abi/src/error.rs b/query-engine/query-engine-c-abi/src/error.rs new file mode 100644 index 000000000000..71ef513173fc --- /dev/null +++ b/query-engine/query-engine-c-abi/src/error.rs @@ -0,0 +1,103 @@ +use psl::diagnostics::Diagnostics; +use query_connector::error::ConnectorError; +use query_core::CoreError; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum ApiError { + #[error("{:?}", _0)] + Conversion(Diagnostics, String), + + #[error("{}", _0)] + Configuration(String), + + #[error("{}", _0)] + Core(CoreError), + + #[error("{}", _0)] + Connector(ConnectorError), + + #[error("Can't modify an already connected engine.")] + AlreadyConnected, + + #[error("Engine is not yet connected.")] + NotConnected, + + #[error("{}", _0)] + JsonDecode(String), +} + +impl From for user_facing_errors::Error { + fn from(err: ApiError) -> Self { + use std::fmt::Write as _; + + match err { + ApiError::Connector(ConnectorError { + user_facing_error: Some(err), + .. 
+ }) => err.into(), + ApiError::Conversion(errors, dml_string) => { + let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string); + write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); + + user_facing_errors::Error::from(user_facing_errors::KnownError::new( + user_facing_errors::common::SchemaParserError { full_error }, + )) + } + ApiError::Core(error) => user_facing_errors::Error::from(error), + other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()), + } + } +} + +impl ApiError { + pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self { + Self::Conversion(diagnostics, dml.to_string()) + } + + pub fn configuration(msg: impl ToString) -> Self { + Self::Configuration(msg.to_string()) + } +} + +impl From for ApiError { + fn from(e: CoreError) -> Self { + match e { + CoreError::ConfigurationError(message) => Self::Configuration(message), + core_error => Self::Core(core_error), + } + } +} + +impl From for ApiError { + fn from(e: ConnectorError) -> Self { + Self::Connector(e) + } +} + +impl From for ApiError { + fn from(e: url::ParseError) -> Self { + Self::configuration(format!("Error parsing connection string: {e}")) + } +} + +impl From for ApiError { + fn from(e: connection_string::Error) -> Self { + Self::configuration(format!("Error parsing connection string: {e}")) + } +} + +impl From for ApiError { + fn from(e: serde_json::Error) -> Self { + Self::JsonDecode(format!("{e}")) + } +} + +// impl From for napi::Error { +// fn from(e: ApiError) -> Self { +// let user_facing = user_facing_errors::Error::from(e); +// let message = serde_json::to_string(&user_facing).unwrap(); + +// napi::Error::from_reason(message) +// } +// } diff --git a/query-engine/query-engine-c-abi/src/functions.rs b/query-engine/query-engine-c-abi/src/functions.rs new file mode 100644 index 000000000000..ec97a98e1f22 --- /dev/null +++ b/query-engine/query-engine-c-abi/src/functions.rs @@ -0,0 +1,40 @@ +// use request_handlers::dmmf; +// use std::sync::Arc; + +// use crate::error::ApiError; + +#[derive(serde::Serialize, Clone, Copy)] +pub struct Version { + pub commit: &'static str, + pub version: &'static str, +} + +pub fn version() -> Version { + Version { + commit: env!("GIT_HASH"), + version: env!("CARGO_PKG_VERSION"), + } +} + +// pub fn dmmf(datamodel_string: String) -> napi::Result { +// let mut schema = psl::validate(datamodel_string.into()); + +// schema +// .diagnostics +// .to_result() +// .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + +// let query_schema = query_core::schema::build(Arc::new(schema), true); +// let dmmf = dmmf::render_dmmf(&query_schema); + +// Ok(serde_json::to_string(&dmmf)?) 
+// } + +// pub fn debug_panic(panic_message: Option) { +// let user_facing = user_facing_errors::Error::from_panic_payload(Box::new( +// panic_message.unwrap_or_else(|| "query-engine-node-api debug panic".to_string()), +// )); +// let message = serde_json::to_string(&user_facing).unwrap(); + +// Err(napi::Error::from_reason(message)) +// } diff --git a/query-engine/query-engine-c-abi/src/lib.rs b/query-engine/query-engine-c-abi/src/lib.rs new file mode 100644 index 000000000000..9dcb1f74ea33 --- /dev/null +++ b/query-engine/query-engine-c-abi/src/lib.rs @@ -0,0 +1,9 @@ +pub mod engine; +pub mod error; +pub mod functions; +pub mod logger; +pub mod migrations; + +mod tracer; + +pub(crate) type Result = std::result::Result; diff --git a/query-engine/query-engine-c-abi/src/logger.rs b/query-engine/query-engine-c-abi/src/logger.rs new file mode 100644 index 000000000000..b6b0d7c06524 --- /dev/null +++ b/query-engine/query-engine-c-abi/src/logger.rs @@ -0,0 +1,174 @@ +use core::fmt; +use query_core::telemetry; +use query_engine_common::logger::StringCallback; +// use query_engine_metrics::MetricRegistry; +use serde_json::Value; +use std::collections::BTreeMap; +use std::sync::Arc; +use tracing::{ + field::{Field, Visit}, + level_filters::LevelFilter, + Dispatch, Level, Subscriber, +}; +use tracing_subscriber::{ + filter::{filter_fn, FilterExt}, + layer::SubscriberExt, + Layer, Registry, +}; + +pub(crate) type LogCallback = Box; + +pub(crate) struct Logger { + dispatcher: Dispatch, + // metrics: Option, +} + +impl Logger { + /// Creates a new logger using a call layer + pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback, enable_tracing: bool) -> Self { + let is_sql_query = filter_fn(|meta| { + meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query") + }); + + // is a mongodb query? 
+ let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query"); + + // We need to filter the messages to send to our callback logging mechanism + let filters = if log_queries { + // Filter trace query events (for query log) or based in the defined log level + is_sql_query.or(is_mongo_query).or(log_level).boxed() + } else { + // Filter based in the defined log level + FilterExt::boxed(log_level) + }; + + let log_callback = Arc::new(log_callback); + let callback_layer = Box::new(CallbackLayer::new(Arc::clone(&log_callback))); + + let is_user_trace = filter_fn(telemetry::helpers::user_facing_span_only_filter); + let tracer = crate::tracer::new_pipeline().install_simple(callback_layer); + let telemetry = if enable_tracing { + let telemetry = tracing_opentelemetry::layer() + .with_tracer(tracer) + .with_filter(is_user_trace); + Some(telemetry) + } else { + None + }; + + let layer = CallbackLayer::new(log_callback).with_filter(filters); + + // let metrics = if enable_metrics { + // query_engine_metrics::setup(); + // Some(MetricRegistry::new()) + // } else { + // None + // }; + + Self { + dispatcher: Dispatch::new(Registry::default().with(telemetry).with(layer)), + // metrics, + } + } + + pub fn dispatcher(&self) -> Dispatch { + self.dispatcher.clone() + } + + // pub fn metrics(&self) -> Option { + // self.metrics.clone() + // } +} + +pub struct JsonVisitor<'a> { + values: BTreeMap<&'a str, Value>, +} + +impl<'a> JsonVisitor<'a> { + pub fn new(level: &Level, target: &str) -> Self { + let mut values = BTreeMap::new(); + values.insert("level", serde_json::Value::from(level.to_string())); + + // NOTE: previous version used module_path, this is not correct and it should be _target_ + values.insert("module_path", serde_json::Value::from(target)); + + JsonVisitor { values } + } +} + +impl<'a> Visit for JsonVisitor<'a> { + fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { + match field.name() { + name if name.starts_with("r#") => { + self.values + .insert(&name[2..], serde_json::Value::from(format!("{value:?}"))); + } + name => { + self.values.insert(name, serde_json::Value::from(format!("{value:?}"))); + } + }; + } + + fn record_i64(&mut self, field: &Field, value: i64) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_u64(&mut self, field: &Field, value: u64) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_bool(&mut self, field: &Field, value: bool) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } + + fn record_str(&mut self, field: &Field, value: &str) { + self.values.insert(field.name(), serde_json::Value::from(value)); + } +} + +impl<'a> ToString for JsonVisitor<'a> { + fn to_string(&self) -> String { + serde_json::to_string(&self.values).unwrap() + } +} + +#[derive(Clone)] +pub(crate) struct CallbackLayer +where + F: Fn(String) -> () + 'static, +{ + callback: Arc, +} + +impl CallbackLayer +where + F: Fn(String) -> () + 'static, +{ + pub fn new(callback: Arc) -> Self { + CallbackLayer { callback } + } +} + +impl StringCallback for CallbackLayer +where + F: Fn(String) -> () + 'static, +{ + fn call(&self, message: String) -> Result<(), String> { + let callback = &self.callback; + callback(message); + Ok(()) + } +} + +// A tracing layer for sending logs to a js callback, layers are composable, subscribers are not. 
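+// `on_event` gathers the event's level, target and recorded fields into a JSON object via
+// `JsonVisitor` and hands the serialized string to the registered callback through `call`.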
+impl Layer for CallbackLayer +where + S: Subscriber, + F: Fn(String) -> (), +{ + fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) { + let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target()); + event.record(&mut visitor); + _ = self.call(visitor.to_string()); + } +} diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs new file mode 100644 index 000000000000..5f01717b4bbc --- /dev/null +++ b/query-engine/query-engine-c-abi/src/migrations.rs @@ -0,0 +1,191 @@ +use indoc::indoc; +use rusqlite::Connection; +use std::{ + fs::{read_dir, DirEntry}, + path::{Path, PathBuf}, +}; +use user_facing_errors::{Error, UnknownError}; + +pub type Timestamp = chrono::DateTime; + +// TODO there is a bunch of casting that is present, however it is not the most correct way +// but since this is an out of tree branch, I do not want to change the common libraries yet + +#[derive(Debug)] +pub struct MigrationDirectory { + path: PathBuf, +} + +impl MigrationDirectory { + /// Initialize a MigrationDirectory at the provided path. This will not + /// validate that the path is valid and exists. + pub fn new(path: PathBuf) -> MigrationDirectory { + MigrationDirectory { path } + } + + /// The `{timestamp}_{name}` formatted migration name. + pub fn migration_name(&self) -> &str { + self.path + .file_name() + .expect("MigrationDirectory::migration_id") + .to_str() + .expect("Migration directory name is not valid UTF-8.") + } + + /// Read the migration script to a string. + pub fn read_migration_script(&self) -> Result { + let path = self.path.join("migration.sql"); + std::fs::read_to_string(&path).map_err(|ioerr| UnknownError::new(&ioerr).into()) + } +} + +impl From for MigrationDirectory { + fn from(entry: DirEntry) -> MigrationDirectory { + MigrationDirectory { path: entry.path() } + } +} + +/// An applied migration, as returned by list_migrations. +#[derive(Debug, Clone)] +pub struct MigrationRecord { + /// A unique, randomly generated identifier. + pub id: String, + /// The timestamp at which the migration completed *successfully*. + pub finished_at: Option, + /// The name of the migration, i.e. the name of migration directory + /// containing the migration script. + pub migration_name: String, + /// The time the migration started being applied. 
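+ /// Populated by the database default (`current_timestamp`) when the row is inserted by
+ /// `record_migration_started`, since that insert only provides the id and migration name.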
+ pub started_at: Timestamp, +} + +pub fn list_migration_dir(migrations_directory_path: &Path) -> Result, Error> { + let mut entries: Vec = Vec::new(); + + let read_dir_entries = match read_dir(migrations_directory_path) { + Ok(read_dir_entries) => read_dir_entries, + // Err(err) if matches!(err.kind(), std::io::ErrorKind::NotFound) => return Ok(entries), + Err(err) => return Err(UnknownError::new(&err).into()), + }; + + for entry in read_dir_entries { + let entry = entry.map_err(|err| UnknownError::new(&err))?; + + if entry.file_type().map_err(|err| UnknownError::new(&err))?.is_dir() { + entries.push(entry.into()); + } + } + + entries.sort_by(|a, b| a.migration_name().cmp(b.migration_name())); + + Ok(entries) +} + +// pub fn detect_failed_migrations(migrations_from_database: &[MigrationRecord]) -> Result<(), user_facing_errors::Error> { +// use std::fmt::Write as _; + +// tracing::debug!("Checking for failed migrations."); + +// let mut failed_migrations = migrations_from_database +// .iter() +// .filter(|migration| migration.finished_at.is_none() && migration.rolled_back_at.is_none()) +// .peekable(); + +// if failed_migrations.peek().is_none() { +// return Ok(()); +// } + +// let mut details = String::new(); + +// for failed_migration in failed_migrations { +// let logs = failed_migration +// .logs +// .as_deref() +// .map(|s| s.trim()) +// .filter(|s| !s.is_empty()) +// .map(|s| format!(" with the following logs:\n{s}")) +// .unwrap_or_default(); + +// writeln!( +// details, +// "The `{name}` migration started at {started_at} failed{logs}", +// name = failed_migration.migration_name, +// started_at = failed_migration.started_at, +// ) +// .unwrap(); +// } + +// // Err(user_facing(FoundFailedMigrations { details })) +// Err(user_facing_errors::Error::from( +// user_facing_errors::common::FoundFailedMigrations { details }, +// )) +// } + +pub fn list_migrations(database_filename: &Path) -> Result, Error> { + let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; + + // Check if the migrations table exists + let table_exists = conn + .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='_prisma_migrations'") + .and_then(|mut stmt| stmt.query_row([], |_| Ok(()))) + .is_ok(); + + // If the migrations table doesn't exist, create it + if !table_exists { + let sql = indoc! 
{r#" + CREATE TABLE "_prisma_migrations" ( + "id" TEXT PRIMARY KEY NOT NULL, + "finished_at" DATETIME, + "migration_name" TEXT NOT NULL, + "started_at" DATETIME NOT NULL DEFAULT current_timestamp + ); + "#}; + + conn.execute(sql, []).map_err(|err| UnknownError::new(&err))?; + } + + let mut stmt = conn + .prepare("SELECT id, migration_name, started_at, finished_at FROM _prisma_migrations") + .map_err(|err| UnknownError::new(&err))?; + let mut rows = stmt.query([]).map_err(|err| UnknownError::new(&err))?; + + let mut entries: Vec = Vec::new(); + + while let Some(row) = rows.next().unwrap() { + let id = row.get(0).unwrap(); + let migration_name: String = row.get(1).unwrap(); + let started_at: Timestamp = row.get(2).unwrap(); + let finished_at: Option = row.get(3).unwrap(); + + entries.push(MigrationRecord { + id, + migration_name, + started_at, + finished_at, + }); + } + + Ok(entries) +} + +pub fn record_migration_started(database_filename: &Path, migration_name: &str) -> Result<(), Error> { + let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; + + let sql = "INSERT INTO _prisma_migrations (id, migration_name) VALUES (?, ?)"; + conn.execute(sql, [uuid::Uuid::new_v4().to_string(), migration_name.to_owned()]) + .map_err(|err| UnknownError::new(&err))?; + + Ok(()) +} + +pub fn execute_migration_script(database_filename: &Path, migration_name: &str, script: &str) -> Result<(), Error> { + let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; + + conn.execute_batch(script).map_err(|err| UnknownError::new(&err))?; + + let sql = "UPDATE _prisma_migrations SET finished_at = current_timestamp WHERE migration_name = ?"; + conn.execute(sql, [migration_name]) + .map_err(|err| UnknownError::new(&err))?; + + Ok(()) +} diff --git a/query-engine/query-engine-c-abi/src/tracer.rs b/query-engine/query-engine-c-abi/src/tracer.rs new file mode 100644 index 000000000000..3bfae7b1e02d --- /dev/null +++ b/query-engine/query-engine-c-abi/src/tracer.rs @@ -0,0 +1 @@ +pub(crate) use query_engine_common::tracer::*; diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml index 83997d887dee..e5233fea4c03 100644 --- a/query-engine/query-engine-node-api/Cargo.toml +++ b/query-engine/query-engine-node-api/Cargo.toml @@ -20,13 +20,17 @@ driver-adapters = [ anyhow = "1" async-trait.workspace = true query-core = { path = "../core", features = ["metrics"] } -request-handlers = { path = "../request-handlers", features = ["native"] } +request-handlers = { path = "../request-handlers", features = [ + "native", + "all", +] } query-connector = { path = "../connectors/query-connector" } query-engine-common = { path = "../../libs/query-engine-common" } user-facing-errors = { path = "../../libs/user-facing-errors" } psl = { workspace = true, features = ["all"] } sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector", features = [ - "native_all", + "native", + "all", ] } query-structure = { path = "../query-structure" } driver-adapters = { path = "../driver-adapters", features = [ diff --git a/query-engine/query-engine-wasm/example/.gitignore b/query-engine/query-engine-wasm/example/.gitignore new file mode 100644 index 000000000000..3997beadf829 --- /dev/null +++ b/query-engine/query-engine-wasm/example/.gitignore @@ -0,0 +1 @@ +*.db \ No newline at end of file diff --git a/query-engine/query-engine-wasm/example/example.js b/query-engine/query-engine-wasm/example/example.js new 
file mode 100644 index 000000000000..c320b442b777 --- /dev/null +++ b/query-engine/query-engine-wasm/example/example.js @@ -0,0 +1,84 @@ +/** + * Run with: `node --experimental-wasm-modules ./example.js` + * on Node.js 18+. + */ +import { readFile } from 'fs/promises' +import { PrismaLibSQL } from '@prisma/adapter-libsql' +import { createClient } from '@libsql/client' +import { bindAdapter } from '@prisma/driver-adapter-utils' +import { QueryEngine, getBuildTimeInfo } from '../pkg/query_engine.js' + + +async function main() { + // Always initialize the Wasm library before using it. + // This sets up the logging and panic hooks. + + const client = createClient({ url: "file:./prisma/dev.db"}) + const adapter = new PrismaLibSQL(client) + const driverAdapter = bindAdapter(adapter) + + console.log('buildTimeInfo', getBuildTimeInfo()) + + const datamodel = await readFile('prisma/schema.prisma', 'utf8') + + const options = { + datamodel, + logLevel: 'info', + logQueries: true, + datasourceOverrides: {}, + env: process.env, + configDir: '/tmp', + ignoreEnvVarErrors: true, + } + const callback = () => { console.log('log-callback') } + + const queryEngine = new QueryEngine(options, callback, driverAdapter) + + await queryEngine.connect('trace') + + const created = await queryEngine.query(JSON.stringify({ + modelName: 'User', + action: 'createOne', + query: { + arguments: { + data: { + id: 1235, + }, + }, + selection: { + $scalars: true + } + } + }), 'trace') + + console.log({ created }) + + const res = await queryEngine.query(JSON.stringify({ + modelName: 'User', + action: 'findMany', + query: { + arguments: {}, + selection: { + $scalars: true + } + } + }), 'trace') + const parsed = JSON.parse(res); + console.log('query result = ') + console.dir(parsed, { depth: null }) + + const error = parsed.errors?.[0]?.user_facing_error + if (error?.error_code === 'P2036') { + console.log('js error:', driverAdapter.errorRegistry.consumeError(error.meta.id)) + } + + // console.log('before disconnect') + await queryEngine.disconnect('trace') + // console.log('after disconnect') + + // console.log('before free') + queryEngine.free() + // console.log('after free') +} + +main() diff --git a/query-engine/query-engine-wasm/example/package.json b/query-engine/query-engine-wasm/example/package.json new file mode 100644 index 000000000000..372d561136bf --- /dev/null +++ b/query-engine/query-engine-wasm/example/package.json @@ -0,0 +1,14 @@ +{ + "type": "module", + "main": "./example.js", + "scripts": { + "dev": "node --experimental-wasm-modules ./example.js" + }, + "dependencies": { + "@libsql/client": "0.5.2", + "@prisma/adapter-libsql": "5.10.2", + "@prisma/client": "5.10.2", + "@prisma/driver-adapter-utils": "5.10.2", + "prisma": "5.10.2" + } +} diff --git a/query-engine/query-engine-wasm/example/pnpm-lock.yaml b/query-engine/query-engine-wasm/example/pnpm-lock.yaml new file mode 100644 index 000000000000..f498c0b042eb --- /dev/null +++ b/query-engine/query-engine-wasm/example/pnpm-lock.yaml @@ -0,0 +1,382 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +dependencies: + '@libsql/client': + specifier: 0.5.2 + version: 0.5.2 + '@prisma/adapter-libsql': + specifier: 5.10.2 + version: 5.10.2(@libsql/client@0.5.2) + '@prisma/client': + specifier: 5.10.2 + version: 5.10.2(prisma@5.10.2) + '@prisma/driver-adapter-utils': + specifier: 5.10.2 + version: 5.10.2 + prisma: + specifier: 5.10.2 + version: 5.10.2 + +packages: + + /@libsql/client@0.5.2: + resolution: {integrity: 
sha512-aHnYjsqE4QWhb+HdJj2HtLw6QBt61veSu6IQgFO5rxzdY/rb69YAgYF0ZvpVoMn12B/t9U9U7H3ow/IADo4Yhg==} + dependencies: + '@libsql/core': 0.5.3 + '@libsql/hrana-client': 0.5.6 + js-base64: 3.7.5 + libsql: 0.3.8 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/core@0.5.3: + resolution: {integrity: sha512-vccnRnLIeru4hacfowXDZZRxYyFWN8Z6CSs+951rH7w9JOMzwmetn5IYsXw5VcOIf0P0aLa86Uhvl1MF8jM6pA==} + dependencies: + js-base64: 3.7.5 + dev: false + + /@libsql/darwin-arm64@0.3.8: + resolution: {integrity: sha512-uh9dfDsmx0NfBjJbFm8APPD8E5s18mxmmmuH4IdSTl/xdv9URAeYo8zv9s2SHgM62QbUUcokLDzLgFfOGSsFBA==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/darwin-x64@0.3.8: + resolution: {integrity: sha512-+5CSFTMs86thuUJW2emzCqrZunueR4ilUV9J1HeZgUtSiQg32/z5GdCR0027JgALqB++yhFGY4WK4SNAPWdKaA==} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@libsql/hrana-client@0.5.6: + resolution: {integrity: sha512-mjQoAmejZ1atG+M3YR2ZW+rg6ceBByH/S/h17ZoYZkqbWrvohFhXyz2LFxj++ARMoY9m6w3RJJIRdJdmnEUlFg==} + dependencies: + '@libsql/isomorphic-fetch': 0.1.12 + '@libsql/isomorphic-ws': 0.1.5 + js-base64: 3.7.5 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + dev: false + + /@libsql/isomorphic-fetch@0.1.12: + resolution: {integrity: sha512-MRo4UcmjAGAa3ac56LoD5OE13m2p0lu0VEtZC2NZMcogM/jc5fU9YtMQ3qbPjFJ+u2BBjFZgMPkQaLS1dlMhpg==} + dependencies: + '@types/node-fetch': 2.6.11 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + dev: false + + /@libsql/isomorphic-ws@0.1.5: + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + dependencies: + '@types/ws': 8.5.10 + ws: 8.14.2 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: false + + /@libsql/linux-arm64-gnu@0.3.8: + resolution: {integrity: sha512-s9blvMx2tA0HGnTHUhEtZZoBLoZqaTxVyjM4qFrxJO84GP902N/DXtbxO2ib6Jbs5rom+78DkpHmi7PzBDLCZA==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-arm64-musl@0.3.8: + resolution: {integrity: sha512-Gw+g5GbeAXdONzpmKVvvdIk/8cCjn0MeN8KNm59xbuwWnkA0NCz94UMD725xOoyl3z+olBxhAdE5yEznLSTcag==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-gnu@0.3.8: + resolution: {integrity: sha512-XRpzXlbM0ZvPVB8/bhun/4dhRUt4PBo1zTz0njaWo/EQoZNGQkps1IZv7v3wR40Kcug4qvmuXTCGuYPQN4QI7w==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/linux-x64-musl@0.3.8: + resolution: {integrity: sha512-gjqjqXpSBj3aB7Q2D0zgoYlquJr8WkPXaByjXE4XYNzcRRg6o+q3V3Uv9s6yhKBoLiBsltUETFJLCoQNzUv9kA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@libsql/win32-x64-msvc@0.3.8: + resolution: {integrity: sha512-KbqqgbL2iBciVFZSJ//36U0Fr6P6AAcLpJPqVckRdNOC43whZlKNglmjtzQDOq3+UVieC8OkLUPEDShRIcSDZA==} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@neon-rs/load@0.0.4: + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + requiresBuild: true + dev: false + + /@prisma/adapter-libsql@5.10.2(@libsql/client@0.5.2): + resolution: {integrity: sha512-XRaSK8IhmodBK3FAvlw0blwUVlIH9sEvUvJvHtGXKoMJDG9zb5HS/NkAqPVG7/8oqUZInZmdNlUXb/RGiROiFg==} + peerDependencies: + '@libsql/client': ^0.3.5 || ^0.4.0 + dependencies: + '@libsql/client': 
0.5.2 + '@prisma/driver-adapter-utils': 5.10.2 + async-mutex: 0.4.1 + dev: false + + /@prisma/client@5.10.2(prisma@5.10.2): + resolution: {integrity: sha512-ef49hzB2yJZCvM5gFHMxSFL9KYrIP9udpT5rYo0CsHD4P9IKj473MbhU1gjKKftiwWBTIyrt9jukprzZXazyag==} + engines: {node: '>=16.13'} + requiresBuild: true + peerDependencies: + prisma: '*' + peerDependenciesMeta: + prisma: + optional: true + dependencies: + prisma: 5.10.2 + dev: false + + /@prisma/debug@5.10.2: + resolution: {integrity: sha512-bkBOmH9dpEBbMKFJj8V+Zp8IZHIBjy3fSyhLhxj4FmKGb/UBSt9doyfA6k1UeUREsMJft7xgPYBbHSOYBr8XCA==} + dev: false + + /@prisma/driver-adapter-utils@5.10.2: + resolution: {integrity: sha512-Qou/js8VJSmaWiGX5EVXGF83fMZltFnuzkKFOocpDvcI3f5G9WTPf61TKflzs3ZOYe1weRgM9hUk9UR7lgGEwg==} + dependencies: + '@prisma/debug': 5.10.2 + dev: false + + /@prisma/engines-version@5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9: + resolution: {integrity: sha512-uCy/++3Jx/O3ufM+qv2H1L4tOemTNqcP/gyEVOlZqTpBvYJUe0tWtW0y3o2Ueq04mll4aM5X3f6ugQftOSLdFQ==} + dev: false + + /@prisma/engines@5.10.2: + resolution: {integrity: sha512-HkSJvix6PW8YqEEt3zHfCYYJY69CXsNdhU+wna+4Y7EZ+AwzeupMnUThmvaDA7uqswiHkgm5/SZ6/4CStjaGmw==} + requiresBuild: true + dependencies: + '@prisma/debug': 5.10.2 + '@prisma/engines-version': 5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9 + '@prisma/fetch-engine': 5.10.2 + '@prisma/get-platform': 5.10.2 + dev: false + + /@prisma/fetch-engine@5.10.2: + resolution: {integrity: sha512-dSmXcqSt6DpTmMaLQ9K8ZKzVAMH3qwGCmYEZr/uVnzVhxRJ1EbT/w2MMwIdBNq1zT69Rvh0h75WMIi0mrIw7Hg==} + dependencies: + '@prisma/debug': 5.10.2 + '@prisma/engines-version': 5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9 + '@prisma/get-platform': 5.10.2 + dev: false + + /@prisma/get-platform@5.10.2: + resolution: {integrity: sha512-nqXP6vHiY2PIsebBAuDeWiUYg8h8mfjBckHh6Jezuwej0QJNnjDiOq30uesmg+JXxGk99nqyG3B7wpcOODzXvg==} + dependencies: + '@prisma/debug': 5.10.2 + dev: false + + /@types/node-fetch@2.6.11: + resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} + dependencies: + '@types/node': 20.9.4 + form-data: 4.0.0 + dev: false + + /@types/node@20.9.4: + resolution: {integrity: sha512-wmyg8HUhcn6ACjsn8oKYjkN/zUzQeNtMy44weTJSM6p4MMzEOuKbA3OjJ267uPCOW7Xex9dyrNTful8XTQYoDA==} + dependencies: + undici-types: 5.26.5 + dev: false + + /@types/ws@8.5.10: + resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + dependencies: + '@types/node': 20.9.4 + dev: false + + /async-mutex@0.4.1: + resolution: {integrity: sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA==} + dependencies: + tslib: 2.6.2 + dev: false + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + dev: false + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + 
/detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + requiresBuild: true + dev: false + + /fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.2.1 + dev: false + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + dependencies: + fetch-blob: 3.2.0 + dev: false + + /js-base64@3.7.5: + resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} + dev: false + + /libsql@0.3.8: + resolution: {integrity: sha512-tz12gCfDXl6WKwtpxpw6PaZtkecHQQQTHuuj6RLQvEfOB17bPpmo8xdC55S4J6fx6qzmqJbaLZSlA6gYJgUXkg==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.3.8 + '@libsql/darwin-x64': 0.3.8 + '@libsql/linux-arm64-gnu': 0.3.8 + '@libsql/linux-arm64-musl': 0.3.8 + '@libsql/linux-x64-gnu': 0.3.8 + '@libsql/linux-x64-musl': 0.3.8 + '@libsql/win32-x64-msvc': 0.3.8 + dev: false + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + dev: false + + /node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + + /node-fetch@3.3.2: + resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + dev: false + + /prisma@5.10.2: + resolution: {integrity: sha512-hqb/JMz9/kymRE25pMWCxkdyhbnIWrq+h7S6WysJpdnCvhstbJSNP/S6mScEcqiB8Qv2F+0R3yG+osRaWqZacQ==} + engines: {node: '>=16.13'} + hasBin: true + requiresBuild: true + dependencies: + '@prisma/engines': 5.10.2 + dev: false + + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: false + + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + dev: false + + /undici-types@5.26.5: + resolution: {integrity: 
sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + dev: false + + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + dev: false + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: false + + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: false + + /ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: false diff --git a/query-engine/query-engine-wasm/example/prisma/schema.prisma b/query-engine/query-engine-wasm/example/prisma/schema.prisma new file mode 100644 index 000000000000..c6432a4a671f --- /dev/null +++ b/query-engine/query-engine-wasm/example/prisma/schema.prisma @@ -0,0 +1,13 @@ +datasource db { + provider = "sqlite" + url = "file:./dev.db" +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters", "tracing"] +} + +model User { + id Int @id @default(autoincrement()) +} diff --git a/query-engine/query-engine/Cargo.toml b/query-engine/query-engine/Cargo.toml index f0f41fcbe4f0..72ff363e5ada 100644 --- a/query-engine/query-engine/Cargo.toml +++ b/query-engine/query-engine/Cargo.toml @@ -6,7 +6,7 @@ version = "0.1.0" [features] default = ["sql", "mongodb"] mongodb = ["mongodb-connector"] -sql = ["sql-connector", "sql-connector/native_all"] +sql = ["sql-connector", "sql-connector/all", "sql-connector/native"] vendored-openssl = ["sql-connector/vendored-openssl"] [dependencies] @@ -21,7 +21,10 @@ psl = { workspace = true, features = ["all"] } graphql-parser = { git = "https://github.com/prisma/graphql-parser" } mongodb-connector = { path = "../connectors/mongodb-query-connector", optional = true, package = "mongodb-query-connector" } query-core = { path = "../core", features = ["metrics"] } -request-handlers = { path = "../request-handlers", features = ["native"] } +request-handlers = { path = "../request-handlers", features = [ + "native", + "all", +] } serde.workspace = true serde_json.workspace = true sql-connector = { path = "../connectors/sql-query-connector", optional = true, package = "sql-query-connector" } diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 8c2277948193..28161f7f33a7 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -37,13 +37,15 @@ sql = ["sql-query-connector"] postgresql = ["sql", "sql-query-connector/postgresql", "psl/postgresql"] mysql = ["sql", "sql-query-connector/mysql", "psl/mysql"] sqlite = ["sql", "sql-query-connector/sqlite", "psl/sqlite"] +cockroachdb = ["sql", "sql-query-connector/postgresql", "psl/cockroachdb"] driver-adapters = ["sql-query-connector/driver-adapters"] -native = [ +native = ["sql-query-connector/native"] +all = [ "mongodb", "sql", "graphql-protocol", + "sql-query-connector/all", "psl/all", - "sql-query-connector/native_all", "query-core/metrics", ] graphql-protocol = 
["query-core/graphql-protocol", "dep:graphql-parser"] diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index 6cb112383f41..ded539fead50 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -43,12 +43,16 @@ pub async fn load( } match datasource.active_provider { + #[cfg(feature = "sqlite")] p if SQLITE.is_provider(p) => native::sqlite(datasource, &url, features).await, + #[cfg(feature = "mysql")] p if MYSQL.is_provider(p) => native::mysql(datasource, &url, features).await, + #[cfg(feature = "postgresql")] p if POSTGRES.is_provider(p) => native::postgres(datasource, &url, features).await, + #[cfg(feature = "mssql")] p if MSSQL.is_provider(p) => native::mssql(datasource, &url, features).await, + #[cfg(feature = "cockroach")] p if COCKROACH.is_provider(p) => native::postgres(datasource, &url, features).await, - #[cfg(feature = "mongodb")] p if MONGODB.is_provider(p) => native::mongodb(datasource, &url, features).await, @@ -87,6 +91,7 @@ mod native { Ok(executor_for(sqlite, false)) } + #[cfg(feature = "postgresql")] pub(crate) async fn postgres( source: &Datasource, url: &str, @@ -109,6 +114,7 @@ mod native { Ok(executor_for(psql, force_transactions)) } + #[cfg(feature = "mysql")] pub(crate) async fn mysql( source: &Datasource, url: &str, @@ -119,6 +125,7 @@ mod native { Ok(executor_for(mysql, false)) } + #[cfg(feature = "mssql")] pub(crate) async fn mssql( source: &Datasource, url: &str, diff --git a/renovate.json b/renovate.json index 83ea8d3b2950..89da07ea51fb 100644 --- a/renovate.json +++ b/renovate.json @@ -18,9 +18,6 @@ "rangeStrategy": "pin", "separateMinorPatch": true, "configMigration": true, - "ignoreDeps": [ - "query-engine-wasm-baseline", - ], "packageRules": [ { "matchFileNames": [ diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs index 3ca75a596de0..64a1fb3daaa0 100644 --- a/schema-engine/core/src/lib.rs +++ b/schema-engine/core/src/lib.rs @@ -13,7 +13,7 @@ pub mod commands; mod api; mod core_error; mod rpc; -mod state; +pub mod state; mod timings; pub use self::{api::GenericApi, core_error::*, rpc::rpc_api, timings::TimingsLayer}; diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index 9143ef1fb767..eb7894535c3d 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -18,7 +18,7 @@ use tracing_futures::Instrument; /// `connectors`. Each connector has its own async task, and communicates with the core through /// channels. That ensures that each connector is handling requests one at a time to avoid /// synchronization issues. You can think of it in terms of the actor model. 
-pub(crate) struct EngineState { +pub struct EngineState { initial_datamodel: Option, host: Arc, // A map from either: @@ -41,7 +41,8 @@ type ErasedConnectorRequest = Box< >; impl EngineState { - pub(crate) fn new(initial_datamodel: Option, host: Option>) -> Self { + /// TODO + pub fn new(initial_datamodel: Option, host: Option>) -> Self { EngineState { initial_datamodel: initial_datamodel.map(|s| psl::validate(s.into())), host: host.unwrap_or_else(|| Arc::new(schema_connector::EmptyHost)), @@ -113,7 +114,12 @@ impl EngineState { response_receiver.await.expect("receiver boomed") } - async fn with_connector_for_url(&self, url: String, f: ConnectorRequest) -> CoreResult { + /// TODO + pub async fn with_connector_for_url( + &self, + url: String, + f: ConnectorRequest, + ) -> CoreResult { let (response_sender, response_receiver) = tokio::sync::oneshot::channel::>(); let erased: ErasedConnectorRequest = Box::new(move |connector| { Box::pin(async move { From 3b0948df6603a43ea03f42ac078d32a11c469899 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 09:57:37 +0100 Subject: [PATCH 02/45] Start undoing merge problems --- libs/user-facing-errors/src/common.rs | 10 ---------- .../query-tests-setup/src/config.rs | 5 ++--- query-engine/driver-adapters/executor/package.json | 8 ++++---- query-engine/driver-adapters/executor/src/testd.ts | 6 +++--- 4 files changed, 9 insertions(+), 20 deletions(-) diff --git a/libs/user-facing-errors/src/common.rs b/libs/user-facing-errors/src/common.rs index 412d23620fd8..8976889e6114 100644 --- a/libs/user-facing-errors/src/common.rs +++ b/libs/user-facing-errors/src/common.rs @@ -239,16 +239,6 @@ pub struct UnsupportedFeatureError { pub message: String, } -#[derive(Debug, Serialize, UserFacingError)] -#[user_facing( - code = "P3009", - message = "migrate found failed migrations in the target database, new migrations will not be applied. Read more about how to resolve migration issues in a production database: https://pris.ly/d/migrate-resolve\n{details}" -)] -pub struct FoundFailedMigrations { - /// The details about each failed migration. 
- pub details: String, -} - #[cfg(test)] mod tests { use super::*; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 625dd14d333d..233bbca4a236 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -164,10 +164,9 @@ impl TestConfig { fn from_file() -> Option { let current_dir = env::current_dir().ok(); - let result = current_dir + current_dir .and_then(|path| Self::try_path(config_path(path))) - .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))); - result + .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))) } fn try_path(path: PathBuf) -> Option { diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 6942e0ed36f5..b30b752c02cb 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -30,12 +30,12 @@ "@prisma/adapter-planetscale": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", "@prisma/bundled-js-drivers": "workspace:*", - "mitata": "^0.1.6", - "undici": "6.6.2", - "ws": "8.14.2" + "mitata": "0.1.11", + "undici": "6.7.0", + "ws": "8.16.0" }, "devDependencies": { - "@types/node": "20.10.8", + "@types/node": "20.11.24", "tsup": "8.0.2", "typescript": "5.3.3" } diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index cc9a3de3cf4c..67c11c333426 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -113,8 +113,8 @@ async function handleRequest(method: string, params: unknown): Promise txId?: string } - debug("🔷 Test query params") - debug('\x1b[36m', JSON.stringify(params, null, 2), '\x1b[0m'); + // debug("🔷 Test query params") + // debug('\x1b[36m', JSON.stringify(params, null, 2), '\x1b[0m'); const castParams = params as QueryPayload; const engine = state[castParams.schemaId].engine const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) @@ -132,7 +132,7 @@ async function handleRequest(method: string, params: unknown): Promise } } - debug("🟢 Engine response: ", result) + // debug("🟢 Engine response: ", result) // returning unparsed string: otherwise, some information gots lost during this round-trip. 
// In particular, floating point without decimal part turn into integers return result From ece7da33e88b06ccbac0695a5fd74e2ff106bc82 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 10:00:47 +0100 Subject: [PATCH 03/45] Undo merge frocks --- query-engine/driver-adapters/package.json | 6 +++--- query-engine/query-engine-c-abi/.gitignore | 3 --- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index d6203e262d3d..6682ebf08ac6 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -16,10 +16,10 @@ "keywords": [], "author": "", "devDependencies": { - "@types/node": "20.10.8", - "esbuild": "0.19.12", + "@types/node": "20.11.24", + "esbuild": "0.20.1", "tsup": "8.0.2", - "tsx": "^4.7.0", + "tsx": "4.7.1", "typescript": "5.3.3" } } diff --git a/query-engine/query-engine-c-abi/.gitignore b/query-engine/query-engine-c-abi/.gitignore index c41332e611c8..2974fad5812d 100644 --- a/query-engine/query-engine-c-abi/.gitignore +++ b/query-engine/query-engine-c-abi/.gitignore @@ -2,9 +2,6 @@ QueryEngine.xcframework simulator_fat # Artifacts of the C ABI engine *.tar.gz -openssl-1.1.1w -openssl-3.2.0 openssl-3.1.4 -rust-openssl libs include \ No newline at end of file From ba23979b9919ddf2d93f36f27cf65ad444ed3519 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 10:05:23 +0100 Subject: [PATCH 04/45] Undo merge frocks --- .../query-engine-wasm/example/.gitignore | 1 - .../query-engine-wasm/example/example.js | 84 ---- .../query-engine-wasm/example/package.json | 14 - .../query-engine-wasm/example/pnpm-lock.yaml | 382 ------------------ .../example/prisma/schema.prisma | 13 - renovate.json | 3 + 6 files changed, 3 insertions(+), 494 deletions(-) delete mode 100644 query-engine/query-engine-wasm/example/.gitignore delete mode 100644 query-engine/query-engine-wasm/example/example.js delete mode 100644 query-engine/query-engine-wasm/example/package.json delete mode 100644 query-engine/query-engine-wasm/example/pnpm-lock.yaml delete mode 100644 query-engine/query-engine-wasm/example/prisma/schema.prisma diff --git a/query-engine/query-engine-wasm/example/.gitignore b/query-engine/query-engine-wasm/example/.gitignore deleted file mode 100644 index 3997beadf829..000000000000 --- a/query-engine/query-engine-wasm/example/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.db \ No newline at end of file diff --git a/query-engine/query-engine-wasm/example/example.js b/query-engine/query-engine-wasm/example/example.js deleted file mode 100644 index c320b442b777..000000000000 --- a/query-engine/query-engine-wasm/example/example.js +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Run with: `node --experimental-wasm-modules ./example.js` - * on Node.js 18+. - */ -import { readFile } from 'fs/promises' -import { PrismaLibSQL } from '@prisma/adapter-libsql' -import { createClient } from '@libsql/client' -import { bindAdapter } from '@prisma/driver-adapter-utils' -import { QueryEngine, getBuildTimeInfo } from '../pkg/query_engine.js' - - -async function main() { - // Always initialize the Wasm library before using it. - // This sets up the logging and panic hooks. 
- - const client = createClient({ url: "file:./prisma/dev.db"}) - const adapter = new PrismaLibSQL(client) - const driverAdapter = bindAdapter(adapter) - - console.log('buildTimeInfo', getBuildTimeInfo()) - - const datamodel = await readFile('prisma/schema.prisma', 'utf8') - - const options = { - datamodel, - logLevel: 'info', - logQueries: true, - datasourceOverrides: {}, - env: process.env, - configDir: '/tmp', - ignoreEnvVarErrors: true, - } - const callback = () => { console.log('log-callback') } - - const queryEngine = new QueryEngine(options, callback, driverAdapter) - - await queryEngine.connect('trace') - - const created = await queryEngine.query(JSON.stringify({ - modelName: 'User', - action: 'createOne', - query: { - arguments: { - data: { - id: 1235, - }, - }, - selection: { - $scalars: true - } - } - }), 'trace') - - console.log({ created }) - - const res = await queryEngine.query(JSON.stringify({ - modelName: 'User', - action: 'findMany', - query: { - arguments: {}, - selection: { - $scalars: true - } - } - }), 'trace') - const parsed = JSON.parse(res); - console.log('query result = ') - console.dir(parsed, { depth: null }) - - const error = parsed.errors?.[0]?.user_facing_error - if (error?.error_code === 'P2036') { - console.log('js error:', driverAdapter.errorRegistry.consumeError(error.meta.id)) - } - - // console.log('before disconnect') - await queryEngine.disconnect('trace') - // console.log('after disconnect') - - // console.log('before free') - queryEngine.free() - // console.log('after free') -} - -main() diff --git a/query-engine/query-engine-wasm/example/package.json b/query-engine/query-engine-wasm/example/package.json deleted file mode 100644 index 372d561136bf..000000000000 --- a/query-engine/query-engine-wasm/example/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "type": "module", - "main": "./example.js", - "scripts": { - "dev": "node --experimental-wasm-modules ./example.js" - }, - "dependencies": { - "@libsql/client": "0.5.2", - "@prisma/adapter-libsql": "5.10.2", - "@prisma/client": "5.10.2", - "@prisma/driver-adapter-utils": "5.10.2", - "prisma": "5.10.2" - } -} diff --git a/query-engine/query-engine-wasm/example/pnpm-lock.yaml b/query-engine/query-engine-wasm/example/pnpm-lock.yaml deleted file mode 100644 index f498c0b042eb..000000000000 --- a/query-engine/query-engine-wasm/example/pnpm-lock.yaml +++ /dev/null @@ -1,382 +0,0 @@ -lockfileVersion: '6.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -dependencies: - '@libsql/client': - specifier: 0.5.2 - version: 0.5.2 - '@prisma/adapter-libsql': - specifier: 5.10.2 - version: 5.10.2(@libsql/client@0.5.2) - '@prisma/client': - specifier: 5.10.2 - version: 5.10.2(prisma@5.10.2) - '@prisma/driver-adapter-utils': - specifier: 5.10.2 - version: 5.10.2 - prisma: - specifier: 5.10.2 - version: 5.10.2 - -packages: - - /@libsql/client@0.5.2: - resolution: {integrity: sha512-aHnYjsqE4QWhb+HdJj2HtLw6QBt61veSu6IQgFO5rxzdY/rb69YAgYF0ZvpVoMn12B/t9U9U7H3ow/IADo4Yhg==} - dependencies: - '@libsql/core': 0.5.3 - '@libsql/hrana-client': 0.5.6 - js-base64: 3.7.5 - libsql: 0.3.8 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate - dev: false - - /@libsql/core@0.5.3: - resolution: {integrity: sha512-vccnRnLIeru4hacfowXDZZRxYyFWN8Z6CSs+951rH7w9JOMzwmetn5IYsXw5VcOIf0P0aLa86Uhvl1MF8jM6pA==} - dependencies: - js-base64: 3.7.5 - dev: false - - /@libsql/darwin-arm64@0.3.8: - resolution: {integrity: 
sha512-uh9dfDsmx0NfBjJbFm8APPD8E5s18mxmmmuH4IdSTl/xdv9URAeYo8zv9s2SHgM62QbUUcokLDzLgFfOGSsFBA==} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@libsql/darwin-x64@0.3.8: - resolution: {integrity: sha512-+5CSFTMs86thuUJW2emzCqrZunueR4ilUV9J1HeZgUtSiQg32/z5GdCR0027JgALqB++yhFGY4WK4SNAPWdKaA==} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: false - optional: true - - /@libsql/hrana-client@0.5.6: - resolution: {integrity: sha512-mjQoAmejZ1atG+M3YR2ZW+rg6ceBByH/S/h17ZoYZkqbWrvohFhXyz2LFxj++ARMoY9m6w3RJJIRdJdmnEUlFg==} - dependencies: - '@libsql/isomorphic-fetch': 0.1.12 - '@libsql/isomorphic-ws': 0.1.5 - js-base64: 3.7.5 - node-fetch: 3.3.2 - transitivePeerDependencies: - - bufferutil - - encoding - - utf-8-validate - dev: false - - /@libsql/isomorphic-fetch@0.1.12: - resolution: {integrity: sha512-MRo4UcmjAGAa3ac56LoD5OE13m2p0lu0VEtZC2NZMcogM/jc5fU9YtMQ3qbPjFJ+u2BBjFZgMPkQaLS1dlMhpg==} - dependencies: - '@types/node-fetch': 2.6.11 - node-fetch: 2.7.0 - transitivePeerDependencies: - - encoding - dev: false - - /@libsql/isomorphic-ws@0.1.5: - resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} - dependencies: - '@types/ws': 8.5.10 - ws: 8.14.2 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - dev: false - - /@libsql/linux-arm64-gnu@0.3.8: - resolution: {integrity: sha512-s9blvMx2tA0HGnTHUhEtZZoBLoZqaTxVyjM4qFrxJO84GP902N/DXtbxO2ib6Jbs5rom+78DkpHmi7PzBDLCZA==} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@libsql/linux-arm64-musl@0.3.8: - resolution: {integrity: sha512-Gw+g5GbeAXdONzpmKVvvdIk/8cCjn0MeN8KNm59xbuwWnkA0NCz94UMD725xOoyl3z+olBxhAdE5yEznLSTcag==} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@libsql/linux-x64-gnu@0.3.8: - resolution: {integrity: sha512-XRpzXlbM0ZvPVB8/bhun/4dhRUt4PBo1zTz0njaWo/EQoZNGQkps1IZv7v3wR40Kcug4qvmuXTCGuYPQN4QI7w==} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@libsql/linux-x64-musl@0.3.8: - resolution: {integrity: sha512-gjqjqXpSBj3aB7Q2D0zgoYlquJr8WkPXaByjXE4XYNzcRRg6o+q3V3Uv9s6yhKBoLiBsltUETFJLCoQNzUv9kA==} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: false - optional: true - - /@libsql/win32-x64-msvc@0.3.8: - resolution: {integrity: sha512-KbqqgbL2iBciVFZSJ//36U0Fr6P6AAcLpJPqVckRdNOC43whZlKNglmjtzQDOq3+UVieC8OkLUPEDShRIcSDZA==} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: false - optional: true - - /@neon-rs/load@0.0.4: - resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} - requiresBuild: true - dev: false - - /@prisma/adapter-libsql@5.10.2(@libsql/client@0.5.2): - resolution: {integrity: sha512-XRaSK8IhmodBK3FAvlw0blwUVlIH9sEvUvJvHtGXKoMJDG9zb5HS/NkAqPVG7/8oqUZInZmdNlUXb/RGiROiFg==} - peerDependencies: - '@libsql/client': ^0.3.5 || ^0.4.0 - dependencies: - '@libsql/client': 0.5.2 - '@prisma/driver-adapter-utils': 5.10.2 - async-mutex: 0.4.1 - dev: false - - /@prisma/client@5.10.2(prisma@5.10.2): - resolution: {integrity: sha512-ef49hzB2yJZCvM5gFHMxSFL9KYrIP9udpT5rYo0CsHD4P9IKj473MbhU1gjKKftiwWBTIyrt9jukprzZXazyag==} - engines: {node: '>=16.13'} - requiresBuild: true - peerDependencies: - prisma: '*' - peerDependenciesMeta: - prisma: - optional: true - dependencies: - prisma: 5.10.2 - dev: false - - /@prisma/debug@5.10.2: - resolution: {integrity: 
sha512-bkBOmH9dpEBbMKFJj8V+Zp8IZHIBjy3fSyhLhxj4FmKGb/UBSt9doyfA6k1UeUREsMJft7xgPYBbHSOYBr8XCA==} - dev: false - - /@prisma/driver-adapter-utils@5.10.2: - resolution: {integrity: sha512-Qou/js8VJSmaWiGX5EVXGF83fMZltFnuzkKFOocpDvcI3f5G9WTPf61TKflzs3ZOYe1weRgM9hUk9UR7lgGEwg==} - dependencies: - '@prisma/debug': 5.10.2 - dev: false - - /@prisma/engines-version@5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9: - resolution: {integrity: sha512-uCy/++3Jx/O3ufM+qv2H1L4tOemTNqcP/gyEVOlZqTpBvYJUe0tWtW0y3o2Ueq04mll4aM5X3f6ugQftOSLdFQ==} - dev: false - - /@prisma/engines@5.10.2: - resolution: {integrity: sha512-HkSJvix6PW8YqEEt3zHfCYYJY69CXsNdhU+wna+4Y7EZ+AwzeupMnUThmvaDA7uqswiHkgm5/SZ6/4CStjaGmw==} - requiresBuild: true - dependencies: - '@prisma/debug': 5.10.2 - '@prisma/engines-version': 5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9 - '@prisma/fetch-engine': 5.10.2 - '@prisma/get-platform': 5.10.2 - dev: false - - /@prisma/fetch-engine@5.10.2: - resolution: {integrity: sha512-dSmXcqSt6DpTmMaLQ9K8ZKzVAMH3qwGCmYEZr/uVnzVhxRJ1EbT/w2MMwIdBNq1zT69Rvh0h75WMIi0mrIw7Hg==} - dependencies: - '@prisma/debug': 5.10.2 - '@prisma/engines-version': 5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9 - '@prisma/get-platform': 5.10.2 - dev: false - - /@prisma/get-platform@5.10.2: - resolution: {integrity: sha512-nqXP6vHiY2PIsebBAuDeWiUYg8h8mfjBckHh6Jezuwej0QJNnjDiOq30uesmg+JXxGk99nqyG3B7wpcOODzXvg==} - dependencies: - '@prisma/debug': 5.10.2 - dev: false - - /@types/node-fetch@2.6.11: - resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} - dependencies: - '@types/node': 20.9.4 - form-data: 4.0.0 - dev: false - - /@types/node@20.9.4: - resolution: {integrity: sha512-wmyg8HUhcn6ACjsn8oKYjkN/zUzQeNtMy44weTJSM6p4MMzEOuKbA3OjJ267uPCOW7Xex9dyrNTful8XTQYoDA==} - dependencies: - undici-types: 5.26.5 - dev: false - - /@types/ws@8.5.10: - resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} - dependencies: - '@types/node': 20.9.4 - dev: false - - /async-mutex@0.4.1: - resolution: {integrity: sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA==} - dependencies: - tslib: 2.6.2 - dev: false - - /asynckit@0.4.0: - resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - dev: false - - /combined-stream@1.0.8: - resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} - engines: {node: '>= 0.8'} - dependencies: - delayed-stream: 1.0.0 - dev: false - - /data-uri-to-buffer@4.0.1: - resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} - engines: {node: '>= 12'} - dev: false - - /delayed-stream@1.0.0: - resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} - engines: {node: '>=0.4.0'} - dev: false - - /detect-libc@2.0.2: - resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} - engines: {node: '>=8'} - requiresBuild: true - dev: false - - /fetch-blob@3.2.0: - resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} - engines: {node: ^12.20 || >= 14.13} - dependencies: - node-domexception: 1.0.0 - web-streams-polyfill: 3.2.1 - dev: false - - /form-data@4.0.0: - 
resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - dev: false - - /formdata-polyfill@4.0.10: - resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} - engines: {node: '>=12.20.0'} - dependencies: - fetch-blob: 3.2.0 - dev: false - - /js-base64@3.7.5: - resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} - dev: false - - /libsql@0.3.8: - resolution: {integrity: sha512-tz12gCfDXl6WKwtpxpw6PaZtkecHQQQTHuuj6RLQvEfOB17bPpmo8xdC55S4J6fx6qzmqJbaLZSlA6gYJgUXkg==} - cpu: [x64, arm64] - os: [darwin, linux, win32] - dependencies: - '@neon-rs/load': 0.0.4 - detect-libc: 2.0.2 - optionalDependencies: - '@libsql/darwin-arm64': 0.3.8 - '@libsql/darwin-x64': 0.3.8 - '@libsql/linux-arm64-gnu': 0.3.8 - '@libsql/linux-arm64-musl': 0.3.8 - '@libsql/linux-x64-gnu': 0.3.8 - '@libsql/linux-x64-musl': 0.3.8 - '@libsql/win32-x64-msvc': 0.3.8 - dev: false - - /mime-db@1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - dev: false - - /mime-types@2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - dev: false - - /node-domexception@1.0.0: - resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} - engines: {node: '>=10.5.0'} - dev: false - - /node-fetch@2.7.0: - resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: false - - /node-fetch@3.3.2: - resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - data-uri-to-buffer: 4.0.1 - fetch-blob: 3.2.0 - formdata-polyfill: 4.0.10 - dev: false - - /prisma@5.10.2: - resolution: {integrity: sha512-hqb/JMz9/kymRE25pMWCxkdyhbnIWrq+h7S6WysJpdnCvhstbJSNP/S6mScEcqiB8Qv2F+0R3yG+osRaWqZacQ==} - engines: {node: '>=16.13'} - hasBin: true - requiresBuild: true - dependencies: - '@prisma/engines': 5.10.2 - dev: false - - /tr46@0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - dev: false - - /tslib@2.6.2: - resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} - dev: false - - /undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - dev: false - - /web-streams-polyfill@3.2.1: - resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} - engines: {node: '>= 8'} - dev: false - - /webidl-conversions@3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - dev: false - - /whatwg-url@5.0.0: - resolution: {integrity: 
sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - dev: false - - /ws@8.14.2: - resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - dev: false diff --git a/query-engine/query-engine-wasm/example/prisma/schema.prisma b/query-engine/query-engine-wasm/example/prisma/schema.prisma deleted file mode 100644 index c6432a4a671f..000000000000 --- a/query-engine/query-engine-wasm/example/prisma/schema.prisma +++ /dev/null @@ -1,13 +0,0 @@ -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} - -generator client { - provider = "prisma-client-js" - previewFeatures = ["driverAdapters", "tracing"] -} - -model User { - id Int @id @default(autoincrement()) -} diff --git a/renovate.json b/renovate.json index 89da07ea51fb..83ea8d3b2950 100644 --- a/renovate.json +++ b/renovate.json @@ -18,6 +18,9 @@ "rangeStrategy": "pin", "separateMinorPatch": true, "configMigration": true, + "ignoreDeps": [ + "query-engine-wasm-baseline", + ], "packageRules": [ { "matchFileNames": [ From dc73adbe249e46244830936fe4130caf08ce5810 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 10:06:38 +0100 Subject: [PATCH 05/45] Undo merge frocks --- schema-engine/core/src/lib.rs | 2 +- schema-engine/core/src/state.rs | 11 +++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs index 64a1fb3daaa0..3ca75a596de0 100644 --- a/schema-engine/core/src/lib.rs +++ b/schema-engine/core/src/lib.rs @@ -13,7 +13,7 @@ pub mod commands; mod api; mod core_error; mod rpc; -pub mod state; +mod state; mod timings; pub use self::{api::GenericApi, core_error::*, rpc::rpc_api, timings::TimingsLayer}; diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index eb7894535c3d..224981af9e0b 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -18,7 +18,7 @@ use tracing_futures::Instrument; /// `connectors`. Each connector has its own async task, and communicates with the core through /// channels. That ensures that each connector is handling requests one at a time to avoid /// synchronization issues. You can think of it in terms of the actor model. 
-pub struct EngineState { +pub(crate) struct EngineState { initial_datamodel: Option, host: Arc, // A map from either: @@ -42,7 +42,7 @@ type ErasedConnectorRequest = Box< impl EngineState { /// TODO - pub fn new(initial_datamodel: Option, host: Option>) -> Self { + pub(crate) fn new(initial_datamodel: Option, host: Option>) -> Self { EngineState { initial_datamodel: initial_datamodel.map(|s| psl::validate(s.into())), host: host.unwrap_or_else(|| Arc::new(schema_connector::EmptyHost)), @@ -114,12 +114,7 @@ impl EngineState { response_receiver.await.expect("receiver boomed") } - /// TODO - pub async fn with_connector_for_url( - &self, - url: String, - f: ConnectorRequest, - ) -> CoreResult { + async fn with_connector_for_url(&self, url: String, f: ConnectorRequest) -> CoreResult { let (response_sender, response_receiver) = tokio::sync::oneshot::channel::>(); let erased: ErasedConnectorRequest = Box::new(move |connector| { Box::pin(async move { From a49b96aa2f7dd3ec376c0e90f587f438f609ef72 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 14:36:30 +0100 Subject: [PATCH 06/45] Various fixes --- .../query-engine-tests/tests/new/interactive_tx.rs | 2 +- query-engine/driver-adapters/executor/package.json | 11 ++++++----- query-engine/driver-adapters/executor/src/rn.ts | 8 ++++++-- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 7f038e0d369e..743c42154db8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -71,7 +71,7 @@ mod interactive_tx { @r###"{"data":{"createOneTestModel":{"id":1}}}"### ); - time::sleep(time::Duration::from_millis(10500)).await; + time::sleep(time::Duration::from_millis(1500)).await; runner.clear_active_tx(); // Everything must be rolled back. 
diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index b30b752c02cb..fa5ff479e30c 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -22,17 +22,18 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", - "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", "@prisma/bundled-js-drivers": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", + "axios": "^1.6.7", "mitata": "0.1.11", - "undici": "6.7.0", - "ws": "8.16.0" + "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", + "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", + "undici": "6.7.0", + "ws": "8.16.0" }, "devDependencies": { "@types/node": "20.11.24", diff --git a/query-engine/driver-adapters/executor/src/rn.ts b/query-engine/driver-adapters/executor/src/rn.ts index fd00e517bdbe..d8112c3971dd 100644 --- a/query-engine/driver-adapters/executor/src/rn.ts +++ b/query-engine/driver-adapters/executor/src/rn.ts @@ -1,8 +1,12 @@ import Axios from "axios"; -export function createRNEngineConnector(url: string, schema: string, logCallback: (msg: string) => void) { +export function createRNEngineConnector( + url: string, + schema: string, + logCallback: (msg: string) => void +) { const port = "3000"; - const baseIP = "192.168.0.14"; + const baseIP = "192.168.178.20"; const deviceUrl = `http://${baseIP}:${port}`; const axios = Axios.create({ baseURL: deviceUrl, From 1456a36cff18ba067ae5f19254cda0181fa0dde2 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 14:39:09 +0100 Subject: [PATCH 07/45] Update query-engine/query-engine-c-abi/README.md Co-authored-by: Jan Piotrowski --- query-engine/query-engine-c-abi/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine-c-abi/README.md b/query-engine/query-engine-c-abi/README.md index 20d57d8ceae2..88a3c154f9f8 100644 --- a/query-engine/query-engine-c-abi/README.md +++ b/query-engine/query-engine-c-abi/README.md @@ -4,7 +4,7 @@ This version of the query engine exposes the Rust engine via C callable function ## Setup -You need to have XCode, Java, Androids NDK (you can/should install it via Android Studio), Cmake installed on your machine to compile the engine. The make file contains the main entry points for building the different architectures and platforms. You also need to install the target Rust architectures. You can find the exact [process described here](https://ospfranco.com/post/2023/08/11/react-native,-rust-step-by-step-integration-guide/). +You need to have Xcode, Java, Android's NDK (you can/should install it via Android Studio), Cmake installed on your machine to compile the engine. The make file contains the main entry points for building the different architectures and platforms. You also need to install the target Rust architectures. You can find the exact [process described here](https://ospfranco.com/post/2023/08/11/react-native,-rust-step-by-step-integration-guide/). 
- `make ios` → Builds the iOS libraries in release mode - `make sim` → Builds the simulator arch only for rapid development From c09dbb9f3a3e3afffded0186463c1550d7a02b41 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 15 Mar 2024 14:50:17 +0100 Subject: [PATCH 08/45] Update README --- query-engine/query-engine-c-abi/README.md | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/query-engine/query-engine-c-abi/README.md b/query-engine/query-engine-c-abi/README.md index 88a3c154f9f8..5e7d8aa1a8f8 100644 --- a/query-engine/query-engine-c-abi/README.md +++ b/query-engine/query-engine-c-abi/README.md @@ -1,4 +1,4 @@ -# Query Engine C (compatible) ABI +# UNSTABLE/EXPERIMENTAL Query Engine C (compatible) ABI This version of the query engine exposes the Rust engine via C callable functions. There are subtle differences to this implementation compared to the node and wasm versions. Although it is usable by any language that can operate with the C ABI, it is oriented to having prisma running on react-native so the build scripts are oriented to that goal. @@ -7,8 +7,9 @@ This version of the query engine exposes the Rust engine via C callable function You need to have Xcode, Java, Android's NDK (you can/should install it via Android Studio), Cmake installed on your machine to compile the engine. The make file contains the main entry points for building the different architectures and platforms. You also need to install the target Rust architectures. You can find the exact [process described here](https://ospfranco.com/post/2023/08/11/react-native,-rust-step-by-step-integration-guide/). - `make ios` → Builds the iOS libraries in release mode -- `make sim` → Builds the simulator arch only for rapid development +- `make sim` → Builds the simulator arch only in debug, much faster, meant for rapid development - `make android` → Builds all the android archs +- `make all` → Builds all the archs Once the libraries have been built there are a couple of extra scripts (`copy-ios.sh` and `copy-android.sh`) that move the results of the compilation into a sibling of the parent folder (`react-native-prisma`), which is where they will be packaged and published to npm. @@ -48,4 +49,8 @@ While `block_on` might not be the most efficient way to achieve things, it keeps The query engine (to be exact, different database connectors) depends on OpenSSL, however, the Rust crate tries to compile the latest version which [currently has a problem with Android armv7 architectures](https://github.com/openssl/openssl/pull/22181). In order to get around this, we have to download OpenSSL, patch it, compile and link it manually. The download, patching and compiling is scripted via the `build-openssl.sh` script. You need to have the Android NDK installed and the `ANDROID_NDK_ROOT` variable set in your environment before running this script. You can find more info on the script itself. The libraries will be outputed in the `libs` folder with the specific structure the Rust compilation needs to finish linking OpenSSL in the main query engine compilation. The crate `openssl` then uses the compiled version by detecting the `OPENSSL_DIR` flag which is set in the `build-android-target.sh` script. -Once the issues upstream are merged we can get rid of this custom compilation step. \ No newline at end of file +Once the issues upstream are merged we can get rid of this custom compilation step. + +## Tests + +The tests for React Native are dependant on JSI, meaning they cannot be run outside a device/simulator. 
The example app contains an HTTP server and the test setup has been reworked to send the requests via HTTP. The usual steps to running the tests are needed but you also need to be running the app and replace the IP address that appears on the screen in the `executor/rn.ts` file. From 0edea72b712ce600b972ff7eab0ebf1da686824f Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 19 Mar 2024 14:07:15 +0100 Subject: [PATCH 09/45] Lint --- .../query-tests-setup/src/config.rs | 4 ++-- query-engine/query-engine-c-abi/src/engine.rs | 14 +++++++------- query-engine/query-engine-c-abi/src/logger.rs | 8 ++++---- query-engine/query-engine-c-abi/src/migrations.rs | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index 233bbca4a236..af504a5a4da3 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -279,7 +279,7 @@ impl TestConfig { } pub fn external_test_executor(&self) -> Option { - self.external_test_executor.clone() + self.external_test_executor } pub fn driver_adapter(&self) -> Option<&str> { @@ -318,7 +318,7 @@ impl TestConfig { ), ( "EXTERNAL_TEST_EXECUTOR".to_string(), - self.external_test_executor.clone().unwrap_or(TestExecutor::Napi).to_string(), + self.external_test_executor.unwrap_or(TestExecutor::Napi).to_string(), ), ( "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 95a72312b868..35a5dd716dc2 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -284,7 +284,7 @@ impl QueryEngine { let tx_id = get_cstr_safe(tx_id_str); let trace = get_cstr_safe(trace_str).expect("Trace is needed"); - let query = RequestBody::try_from_str(&body, engine.engine_protocol()).map_err(|err| Error::from(err))?; + let query = RequestBody::try_from_str(&body, engine.engine_protocol()).map_err(Error::from)?; let span = if tx_id.is_none() { tracing::info_span!("prisma:engine", user_facing = true) @@ -547,7 +547,7 @@ pub unsafe extern "C" fn prisma_query( *error_string_ptr = Box::into_raw(Box::new(error_string.as_ptr())) as *mut c_char; }); std::mem::forget(query_engine); - return null_mut(); + null_mut() } } } @@ -567,7 +567,7 @@ pub unsafe extern "C" fn prisma_start_transaction( } Err(_err) => { std::mem::forget(query_engine); - return null_mut(); + null_mut() } } } @@ -584,7 +584,7 @@ pub unsafe extern "C" fn prisma_commit_transaction( match result { Ok(query_result) => CString::new(query_result).unwrap().into_raw(), Err(_err) => { - return null_mut(); + null_mut() } } } @@ -601,7 +601,7 @@ pub unsafe extern "C" fn prisma_rollback_transaction( match result { Ok(query_result) => CString::new(query_result).unwrap().into_raw(), Err(_err) => { - return null_mut(); + null_mut() } } } @@ -614,7 +614,7 @@ pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *co match result { Ok(_) => PRISMA_OK, Err(_err) => { - return PRISMA_UNKNOWN_ERROR; + PRISMA_UNKNOWN_ERROR } } } @@ -640,7 +640,7 @@ pub unsafe extern "C" fn prisma_apply_pending_migrations( *error_string_ptr = error_string.as_ptr() as *mut c_char; }); std::mem::forget(query_engine); - return PRISMA_UNKNOWN_ERROR; + PRISMA_UNKNOWN_ERROR } } } diff --git a/query-engine/query-engine-c-abi/src/logger.rs 
b/query-engine/query-engine-c-abi/src/logger.rs index b6b0d7c06524..1970262c207f 100644 --- a/query-engine/query-engine-c-abi/src/logger.rs +++ b/query-engine/query-engine-c-abi/src/logger.rs @@ -135,14 +135,14 @@ impl<'a> ToString for JsonVisitor<'a> { #[derive(Clone)] pub(crate) struct CallbackLayer where - F: Fn(String) -> () + 'static, + F: Fn(String) + 'static, { callback: Arc, } impl CallbackLayer where - F: Fn(String) -> () + 'static, + F: Fn(String) + 'static, { pub fn new(callback: Arc) -> Self { CallbackLayer { callback } @@ -151,7 +151,7 @@ where impl StringCallback for CallbackLayer where - F: Fn(String) -> () + 'static, + F: Fn(String) + 'static, { fn call(&self, message: String) -> Result<(), String> { let callback = &self.callback; @@ -164,7 +164,7 @@ where impl Layer for CallbackLayer where S: Subscriber, - F: Fn(String) -> (), + F: Fn(String), { fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) { let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target()); diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs index 5f01717b4bbc..ffba8e2b39ee 100644 --- a/query-engine/query-engine-c-abi/src/migrations.rs +++ b/query-engine/query-engine-c-abi/src/migrations.rs @@ -35,7 +35,7 @@ impl MigrationDirectory { /// Read the migration script to a string. pub fn read_migration_script(&self) -> Result { let path = self.path.join("migration.sql"); - std::fs::read_to_string(&path).map_err(|ioerr| UnknownError::new(&ioerr).into()) + std::fs::read_to_string(path).map_err(|ioerr| UnknownError::new(&ioerr).into()) } } From 77652cd706b2ae969001a17838abe67c8fd56c9a Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 19 Mar 2024 14:49:39 +0100 Subject: [PATCH 10/45] Safety comments --- query-engine/query-engine-c-abi/src/engine.rs | 43 ++++++++++++++----- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 35a5dd716dc2..98db9a6cc7a1 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -79,7 +79,7 @@ pub struct ConstructorOptions { } fn get_cstr_safe(ptr: *const c_char) -> Option { - if ptr == std::ptr::null_mut() { + if ptr.is_null() { None } else { let cstr = unsafe { CStr::from_ptr(ptr) }; @@ -478,9 +478,12 @@ impl QueryEngine { // Be should be careful about not de-allocating the pointer // when adding a new function remember to always call mem::forget +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference #[no_mangle] pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut *mut QueryEngine) -> c_int { - if qe_ptr == std::ptr::null_mut() { + if qe_ptr.is_null() { return PRISMA_MISSING_POINTER; } @@ -497,6 +500,9 @@ pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_connect( qe: *mut QueryEngine, @@ -524,6 +530,9 @@ pub unsafe extern "C" fn prisma_connect( } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_query( qe: *mut QueryEngine, @@ -552,6 +561,9 @@ pub unsafe extern "C" fn prisma_query( } } 
+/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_start_transaction( qe: *mut QueryEngine, @@ -572,6 +584,9 @@ pub unsafe extern "C" fn prisma_start_transaction( } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_commit_transaction( qe: *mut QueryEngine, @@ -583,12 +598,13 @@ pub unsafe extern "C" fn prisma_commit_transaction( std::mem::forget(query_engine); match result { Ok(query_result) => CString::new(query_result).unwrap().into_raw(), - Err(_err) => { - null_mut() - } + Err(_err) => null_mut(), } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_rollback_transaction( qe: *mut QueryEngine, @@ -600,12 +616,13 @@ pub unsafe extern "C" fn prisma_rollback_transaction( std::mem::forget(query_engine); match result { Ok(query_result) => CString::new(query_result).unwrap().into_raw(), - Err(_err) => { - null_mut() - } + Err(_err) => null_mut(), } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *const c_char) -> c_int { let query_engine: Box = Box::from_raw(qe); @@ -613,12 +630,13 @@ pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *co std::mem::forget(query_engine); match result { Ok(_) => PRISMA_OK, - Err(_err) => { - PRISMA_UNKNOWN_ERROR - } + Err(_err) => PRISMA_UNKNOWN_ERROR, } } +/// # Safety +/// +/// The calling context needs to pass a valid pointer that will store the reference to the error string #[no_mangle] pub unsafe extern "C" fn prisma_apply_pending_migrations( qe: *mut QueryEngine, @@ -645,6 +663,9 @@ pub unsafe extern "C" fn prisma_apply_pending_migrations( } } +/// # Safety +/// +/// Will destroy the pointer to the query engine #[no_mangle] pub unsafe extern "C" fn prisma_destroy(qe: *mut QueryEngine) -> c_int { // Once the variable goes out of scope, it will be deallocated From 22c5ed7bb6dc353336af0d62aafa4c2f9412a049 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 19 Mar 2024 14:55:48 +0100 Subject: [PATCH 11/45] Shellcheck --- .../query-engine-c-abi/build-android-target.sh | 2 +- query-engine/query-engine-c-abi/build-openssl.sh | 16 +++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/query-engine/query-engine-c-abi/build-android-target.sh b/query-engine/query-engine-c-abi/build-android-target.sh index 16104cfef11c..08fd0e0f26a7 100755 --- a/query-engine/query-engine-c-abi/build-android-target.sh +++ b/query-engine/query-engine-c-abi/build-android-target.sh @@ -57,4 +57,4 @@ CC=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang \ CXX=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang++ \ RANLIB=$TOOLS/bin/llvm-ranlib \ CXXFLAGS="--target=$NDK_TARGET" \ -cargo build --release --target $TARGET $EXTRA_ARGS \ No newline at end of file +cargo build --release --target "$TARGET" "$EXTRA_ARGS" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/build-openssl.sh b/query-engine/query-engine-c-abi/build-openssl.sh index 88621d06639c..878d4ed727ae 100755 --- a/query-engine/query-engine-c-abi/build-openssl.sh +++ b/query-engine/query-engine-c-abi/build-openssl.sh @@ -11,7 +11,6 @@ if [ ! 
-d ${OPENSSL_VERSION}.tar.gz ]; then tar xfz "${OPENSSL_VERSION}.tar.gz" fi -PROJECT_HOME=`pwd` PATH_ORG=$PATH OUTPUT_DIR="libs" @@ -22,12 +21,11 @@ mkdir $OUTPUT_DIR build_android_clang() { echo "" - echo "----- Build libcrypto & libssl.so for "$1" -----" + echo "----- Build libcrypto & libssl.so for $1 -----" echo "" ARCHITECTURE=$1 TOOLCHAIN=$2 - stl="libc++" # Set toolchain export TOOLCHAIN_ROOT=$ANDROID_NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64 @@ -41,7 +39,7 @@ build_android_clang() { cd "${OPENSSL_VERSION}" - ./Configure $ARCHITECTURE no-asm no-shared -D__ANDROID_API__=21 + ./Configure "$ARCHITECTURE" no-asm no-shared -D__ANDROID_API__=21 make clean # Apply patch that fixes the armcap instruction @@ -52,18 +50,18 @@ build_android_clang() { make - mkdir -p ../$OUTPUT_DIR/${ARCHITECTURE}/lib - mkdir -p ../$OUTPUT_DIR/${ARCHITECTURE}/include + mkdir -p ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib + mkdir -p ../$OUTPUT_DIR/"${ARCHITECTURE}"/include # file libcrypto.so # file libssl.so - cp libcrypto.a ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libcrypto.a - cp libssl.a ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libssl.a + cp libcrypto.a ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib/libcrypto.a + cp libssl.a ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib/libssl.a # cp libcrypto.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libcrypto.so # cp libssl.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libssl.so - cp -R include/openssl ../$OUTPUT_DIR/${ARCHITECTURE}/include + cp -R include/openssl ../$OUTPUT_DIR/"${ARCHITECTURE}"/include cd .. } From 2d2b0881f294d475caf2419406c61e1ad1f5d971 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Wed, 20 Mar 2024 14:20:05 +0100 Subject: [PATCH 12/45] PR changes --- query-engine/connector-test-kit-rs/README.md | 2 +- .../query-engine-tests/tests/new/metrics.rs | 2 +- .../src/connector_tag/sqlite.rs | 6 +- .../test-configs/react-native | 2 +- .../connectors/sql-query-connector/Cargo.toml | 4 - .../driver-adapters/executor/package.json | 1 - .../driver-adapters/executor/src/rn.ts | 98 +++++++++++-------- .../driver-adapters/executor/src/testd.ts | 6 +- 8 files changed, 66 insertions(+), 55 deletions(-) diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 4b44b96c16ba..5d8fbcc148bb 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -84,7 +84,7 @@ To run tests through a driver adapters, you should also configure the following * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. -* `ENGINE`: can be used to run either `wasm` or `napi` or `mobile` version of the engine. +* `ENGINE`: can be used to run either `wasm` or `napi` or `c-abi` version of the engine. 
Example: diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index d47dea61e186..ff55e7f864da 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -5,7 +5,7 @@ use query_engine_tests::test_suite; exclude( Vitess("planetscale.js", "planetscale.js.wasm"), Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), - Sqlite("libsql.js", "libsql.js.wasm", "rn") + Sqlite("libsql.js", "libsql.js.wasm", "react-native") ) )] mod metrics { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index 2194f6e6404b..c10c8c618de0 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -29,7 +29,7 @@ impl ConnectorTagInterface for SqliteConnectorTag { #[derive(Clone, Debug, PartialEq, Eq)] pub enum SqliteVersion { V3, - RN, + ReactNative, LibsqlJsNapi, LibsqlJsWasm, } @@ -37,7 +37,7 @@ pub enum SqliteVersion { impl ToString for SqliteVersion { fn to_string(&self) -> String { match self { - SqliteVersion::RN => "rn".to_string(), + SqliteVersion::ReactNative => "react-native".to_string(), SqliteVersion::V3 => "3".to_string(), SqliteVersion::LibsqlJsNapi => "libsql.js".to_string(), SqliteVersion::LibsqlJsWasm => "libsql.js.wasm".to_string(), @@ -53,7 +53,7 @@ impl TryFrom<&str> for SqliteVersion { "3" => Self::V3, "libsql.js" => Self::LibsqlJsNapi, "libsql.js.wasm" => Self::LibsqlJsWasm, - "rn" => Self::RN, + "react-native" => Self::ReactNative, _ => return Err(TestError::parse_error(format!("Unknown SQLite version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/test-configs/react-native b/query-engine/connector-test-kit-rs/test-configs/react-native index 0f1ab57860e3..a26c5a358802 100644 --- a/query-engine/connector-test-kit-rs/test-configs/react-native +++ b/query-engine/connector-test-kit-rs/test-configs/react-native @@ -1,5 +1,5 @@ { "connector": "sqlite", - "version": "rn", + "version": "react-native", "external_test_executor": "Mobile" } diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 68ef5bf4494d..c7152688629c 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -38,10 +38,6 @@ opentelemetry = { version = "0.17", features = ["tokio"] } tracing-opentelemetry = "0.17.3" cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } quaint.workspace = true -# quaint = { path = "../../../quaint", default-features = false, features = [ -# "sqlite", -# ] } - [dependencies.connector-interface] package = "query-connector" diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index fa5ff479e30c..ac33aa94b2ca 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -28,7 +28,6 @@ "@prisma/adapter-planetscale": "workspace:*", "@prisma/bundled-js-drivers": "workspace:*", "@prisma/driver-adapter-utils": "workspace:*", - "axios": "^1.6.7", "mitata": "0.1.11", "query-engine-wasm-baseline": 
"npm:@prisma/query-engine-wasm@0.0.19", "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", diff --git a/query-engine/driver-adapters/executor/src/rn.ts b/query-engine/driver-adapters/executor/src/rn.ts index d8112c3971dd..ad451283e0fc 100644 --- a/query-engine/driver-adapters/executor/src/rn.ts +++ b/query-engine/driver-adapters/executor/src/rn.ts @@ -1,5 +1,3 @@ -import Axios from "axios"; - export function createRNEngineConnector( url: string, schema: string, @@ -8,75 +6,93 @@ export function createRNEngineConnector( const port = "3000"; const baseIP = "192.168.178.20"; const deviceUrl = `http://${baseIP}:${port}`; - const axios = Axios.create({ - baseURL: deviceUrl, - headers: { - "Content-Type": "application/json", - }, - transformResponse: (r) => r, - }); - - // axios.get("/ping").then(() => { - // console.error(`✅ Connection to RN device successful! URL: ${deviceUrl}`); - // }).catch(() => { - // throw new Error(`Could not ping device! Check server is runing on IP: ${deviceUrl}`) - // }) + const headers = { + "Content-Type": "application/json", + Accept: "application/json", + }; return { connect: async () => { - const res = await axios.post(`/connect`, { - schema, + const res = await fetch(`${deviceUrl}/connect`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ schema }), }); - return res.data; + + return await res.json(); }, query: async ( body: string, trace: string, txId: string ): Promise => { - const res = await axios.post("/query", { - body, - trace, - txId, + const res = await fetch(`${deviceUrl}/query`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + body, + trace, + txId, + }), }); - const response = JSON.parse(res.data) + const response = await res.json(); - if(response.logs.length) { - response.logs.forEach(logCallback) + if (response.logs.length) { + response.logs.forEach(logCallback); } return response.engineResponse; }, startTransaction: async (body: string, trace: string): Promise => { - const res = await axios.post("/start_transaction", { - body, - trace, + const res = await fetch(`${deviceUrl}/start_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + body, + trace, + }), }); - // console.error("start transaction data", res.data); - return res.data; + return await res.json(); }, commitTransaction: async (txId: string, trace: string): Promise => { - const res = await axios.post("/commit_transaction", { - txId, - trace, + const res = await fetch(`${deviceUrl}/commit_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + txId, + trace, + }), }); - // console.error(`🐲 ${res.data}`); - return res.data; + return res.json(); }, rollbackTransaction: async ( txId: string, trace: string ): Promise => { - const res = await axios.post("/rollback_transaction", { - txId, - trace, + const res = await fetch(`${deviceUrl}/rollback_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + txId, + trace, + }), }); - return res.data; + return res.json(); }, disconnect: async (trace: string) => { - await axios.post("/disconnect", { - trace, + await fetch(`${deviceUrl}/disconnect`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + trace, + }), }); }, }; diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index 67c11c333426..cc9a3de3cf4c 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ 
b/query-engine/driver-adapters/executor/src/testd.ts @@ -113,8 +113,8 @@ async function handleRequest(method: string, params: unknown): Promise txId?: string } - // debug("🔷 Test query params") - // debug('\x1b[36m', JSON.stringify(params, null, 2), '\x1b[0m'); + debug("🔷 Test query params") + debug('\x1b[36m', JSON.stringify(params, null, 2), '\x1b[0m'); const castParams = params as QueryPayload; const engine = state[castParams.schemaId].engine const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) @@ -132,7 +132,7 @@ async function handleRequest(method: string, params: unknown): Promise } } - // debug("🟢 Engine response: ", result) + debug("🟢 Engine response: ", result) // returning unparsed string: otherwise, some information gots lost during this round-trip. // In particular, floating point without decimal part turn into integers return result From b666f9c95e4ecf826473fe72450cd11a066379cd Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Wed, 20 Mar 2024 15:12:05 +0100 Subject: [PATCH 13/45] Update query-engine/query-engine-c-abi/README.md Co-authored-by: Flavian Desverne --- query-engine/query-engine-c-abi/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine-c-abi/README.md b/query-engine/query-engine-c-abi/README.md index 5e7d8aa1a8f8..89f9ef2b68ee 100644 --- a/query-engine/query-engine-c-abi/README.md +++ b/query-engine/query-engine-c-abi/README.md @@ -41,7 +41,7 @@ Like any C-API, returning multiple chunks of data is done via passing pointers ( Each operation should return an integer status code that indicates PRISMA_OK (0) if the opereation finished correctly or different error codes for each possible error. -C calls are not compatible with tokio/async, so the C functions need to use `block_on` in order to keep synchronisity. If async functionality is wanted the calling language/environment should spin up their own threads and call the functions in there. +C calls are not compatible with tokio/async, so the C functions need to use `block_on` in order to keep synchronicity. If async functionality is wanted the calling language/environment should spin up their own threads and call the functions in there. While `block_on` might not be the most efficient way to achieve things, it keeps changes to the core query_engine functionality at a minimum. 
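The calling convention described in the README hunk above (out-pointers for data, integer status codes, `block_on` to bridge synchronous C calls into async Rust) can be made concrete with a minimal, self-contained sketch. This is an illustration only, not the engine's actual exported API: `demo_query`, `run_query`, and the `DEMO_*` status constants are hypothetical names, and the sketch assumes `tokio` (with the `rt-multi-thread` feature) and `once_cell` as dependencies.

```rust
use std::ffi::{c_char, c_int, CStr, CString};
use std::ptr;

use once_cell::sync::Lazy;
use tokio::runtime::Runtime;

const DEMO_OK: c_int = 0;
const DEMO_UNKNOWN_ERROR: c_int = 1;

// One shared runtime so every C entry point can block on async work.
static RUNTIME: Lazy<Runtime> = Lazy::new(|| Runtime::new().expect("failed to build tokio runtime"));

// Stand-in for the real async engine call.
async fn run_query(body: &str) -> Result<String, String> {
    Ok(format!("echo: {body}"))
}

/// # Safety
///
/// `body` must point to a valid NUL-terminated string, and `result_out` /
/// `error_out` must be valid, writable pointers. Strings written to the
/// out-pointers are owned by the caller afterwards and must be released
/// through a matching deallocation function (e.g. a hypothetical
/// `demo_free_string`) exported by the same library.
#[no_mangle]
pub unsafe extern "C" fn demo_query(
    body: *const c_char,
    result_out: *mut *mut c_char,
    error_out: *mut *mut c_char,
) -> c_int {
    if body.is_null() || result_out.is_null() || error_out.is_null() {
        return DEMO_UNKNOWN_ERROR;
    }

    let Ok(body) = CStr::from_ptr(body).to_str() else {
        return DEMO_UNKNOWN_ERROR;
    };

    // Block the calling thread until the async work completes.
    match RUNTIME.block_on(run_query(body)) {
        Ok(json) => {
            *result_out = CString::new(json).unwrap().into_raw();
            *error_out = ptr::null_mut();
            DEMO_OK
        }
        Err(message) => {
            *result_out = ptr::null_mut();
            *error_out = CString::new(message).unwrap().into_raw();
            DEMO_UNKNOWN_ERROR
        }
    }
}
```

A caller checks the status code before reading either out-pointer and frees any returned string through the library's matching deallocation function. Because each call blocks the calling thread, the host environment is expected to dispatch these functions from its own worker threads, which is the trade-off the paragraph above accepts in exchange for keeping changes to the core query engine at a minimum.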
From 4055d1d4c2068b1768fd54aeb978626ce7ebfb24 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Wed, 20 Mar 2024 16:15:28 +0100 Subject: [PATCH 14/45] PR comments --- .../build-android-target.sh | 4 +- query-engine/query-engine-c-abi/src/engine.rs | 8 +- query-engine/query-engine-c-abi/src/error.rs | 103 ------------------ query-engine/query-engine-c-abi/src/lib.rs | 3 +- 4 files changed, 9 insertions(+), 109 deletions(-) delete mode 100644 query-engine/query-engine-c-abi/src/error.rs diff --git a/query-engine/query-engine-c-abi/build-android-target.sh b/query-engine/query-engine-c-abi/build-android-target.sh index 08fd0e0f26a7..823888f02473 100755 --- a/query-engine/query-engine-c-abi/build-android-target.sh +++ b/query-engine/query-engine-c-abi/build-android-target.sh @@ -1,5 +1,7 @@ #!/bin/bash +set -ex + TARGET="$1" if [ "$TARGET" = "" ]; then @@ -57,4 +59,4 @@ CC=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang \ CXX=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang++ \ RANLIB=$TOOLS/bin/llvm-ranlib \ CXXFLAGS="--target=$NDK_TARGET" \ -cargo build --release --target "$TARGET" "$EXTRA_ARGS" \ No newline at end of file +cargo build --release --target "$TARGET" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 98db9a6cc7a1..d646134ca67e 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -1,5 +1,4 @@ use crate::{ - error::ApiError, logger::Logger, migrations::{ execute_migration_script, list_migration_dir, list_migrations, record_migration_started, MigrationDirectory, @@ -27,8 +26,9 @@ use tokio::{ use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument, Span}; use user_facing_errors::Error; -use query_engine_common::engine::{ - stringify_env_values, ConnectedEngine, ConnectedEngineNative, EngineBuilder, EngineBuilderNative, Inner, +use query_engine_common::{ + engine::{stringify_env_values, ConnectedEngine, ConnectedEngineNative, EngineBuilder, EngineBuilderNative, Inner}, + error::ApiError, }; use request_handlers::ConnectorKind; // use tracing_subscriber::filter::LevelFilter; @@ -216,7 +216,7 @@ impl QueryEngine { .load_url_with_config_dir(&builder.native.config_dir, |key| { builder.native.env.get(key).map(ToString::to_string) }) - .map_err(|err| crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()))?; + .map_err(|err| ApiError::Conversion(err, builder.schema.db.source().to_owned()))?; // This version of the query engine supports connecting via Rust bindings directly // support for JS drivers can be added, but I commented it out for now let connector_kind = ConnectorKind::Rust { diff --git a/query-engine/query-engine-c-abi/src/error.rs b/query-engine/query-engine-c-abi/src/error.rs deleted file mode 100644 index 71ef513173fc..000000000000 --- a/query-engine/query-engine-c-abi/src/error.rs +++ /dev/null @@ -1,103 +0,0 @@ -use psl::diagnostics::Diagnostics; -use query_connector::error::ConnectorError; -use query_core::CoreError; -use thiserror::Error; - -#[derive(Debug, Error)] -pub enum ApiError { - #[error("{:?}", _0)] - Conversion(Diagnostics, String), - - #[error("{}", _0)] - Configuration(String), - - #[error("{}", _0)] - Core(CoreError), - - #[error("{}", _0)] - Connector(ConnectorError), - - #[error("Can't modify an already connected engine.")] - AlreadyConnected, - - #[error("Engine is not yet connected.")] - NotConnected, - - #[error("{}", _0)] - JsonDecode(String), -} - -impl 
From for user_facing_errors::Error { - fn from(err: ApiError) -> Self { - use std::fmt::Write as _; - - match err { - ApiError::Connector(ConnectorError { - user_facing_error: Some(err), - .. - }) => err.into(), - ApiError::Conversion(errors, dml_string) => { - let mut full_error = errors.to_pretty_string("schema.prisma", &dml_string); - write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); - - user_facing_errors::Error::from(user_facing_errors::KnownError::new( - user_facing_errors::common::SchemaParserError { full_error }, - )) - } - ApiError::Core(error) => user_facing_errors::Error::from(error), - other => user_facing_errors::Error::new_non_panic_with_current_backtrace(other.to_string()), - } - } -} - -impl ApiError { - pub fn conversion(diagnostics: Diagnostics, dml: impl ToString) -> Self { - Self::Conversion(diagnostics, dml.to_string()) - } - - pub fn configuration(msg: impl ToString) -> Self { - Self::Configuration(msg.to_string()) - } -} - -impl From for ApiError { - fn from(e: CoreError) -> Self { - match e { - CoreError::ConfigurationError(message) => Self::Configuration(message), - core_error => Self::Core(core_error), - } - } -} - -impl From for ApiError { - fn from(e: ConnectorError) -> Self { - Self::Connector(e) - } -} - -impl From for ApiError { - fn from(e: url::ParseError) -> Self { - Self::configuration(format!("Error parsing connection string: {e}")) - } -} - -impl From for ApiError { - fn from(e: connection_string::Error) -> Self { - Self::configuration(format!("Error parsing connection string: {e}")) - } -} - -impl From for ApiError { - fn from(e: serde_json::Error) -> Self { - Self::JsonDecode(format!("{e}")) - } -} - -// impl From for napi::Error { -// fn from(e: ApiError) -> Self { -// let user_facing = user_facing_errors::Error::from(e); -// let message = serde_json::to_string(&user_facing).unwrap(); - -// napi::Error::from_reason(message) -// } -// } diff --git a/query-engine/query-engine-c-abi/src/lib.rs b/query-engine/query-engine-c-abi/src/lib.rs index 9dcb1f74ea33..d4bcf6b10c5c 100644 --- a/query-engine/query-engine-c-abi/src/lib.rs +++ b/query-engine/query-engine-c-abi/src/lib.rs @@ -1,5 +1,6 @@ +use query_engine_common::error; + pub mod engine; -pub mod error; pub mod functions; pub mod logger; pub mod migrations; From f61fd34b6ee75193086fe79f0662ccf803352055 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Wed, 20 Mar 2024 17:47:58 +0100 Subject: [PATCH 15/45] Get rid of functions file --- .../query-engine-c-abi/src/functions.rs | 40 ------------------- query-engine/query-engine-c-abi/src/lib.rs | 1 - 2 files changed, 41 deletions(-) delete mode 100644 query-engine/query-engine-c-abi/src/functions.rs diff --git a/query-engine/query-engine-c-abi/src/functions.rs b/query-engine/query-engine-c-abi/src/functions.rs deleted file mode 100644 index ec97a98e1f22..000000000000 --- a/query-engine/query-engine-c-abi/src/functions.rs +++ /dev/null @@ -1,40 +0,0 @@ -// use request_handlers::dmmf; -// use std::sync::Arc; - -// use crate::error::ApiError; - -#[derive(serde::Serialize, Clone, Copy)] -pub struct Version { - pub commit: &'static str, - pub version: &'static str, -} - -pub fn version() -> Version { - Version { - commit: env!("GIT_HASH"), - version: env!("CARGO_PKG_VERSION"), - } -} - -// pub fn dmmf(datamodel_string: String) -> napi::Result { -// let mut schema = psl::validate(datamodel_string.into()); - -// schema -// .diagnostics -// .to_result() -// .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; - 
-// let query_schema = query_core::schema::build(Arc::new(schema), true); -// let dmmf = dmmf::render_dmmf(&query_schema); - -// Ok(serde_json::to_string(&dmmf)?) -// } - -// pub fn debug_panic(panic_message: Option) { -// let user_facing = user_facing_errors::Error::from_panic_payload(Box::new( -// panic_message.unwrap_or_else(|| "query-engine-node-api debug panic".to_string()), -// )); -// let message = serde_json::to_string(&user_facing).unwrap(); - -// Err(napi::Error::from_reason(message)) -// } diff --git a/query-engine/query-engine-c-abi/src/lib.rs b/query-engine/query-engine-c-abi/src/lib.rs index d4bcf6b10c5c..e2a2f0d43ddc 100644 --- a/query-engine/query-engine-c-abi/src/lib.rs +++ b/query-engine/query-engine-c-abi/src/lib.rs @@ -1,7 +1,6 @@ use query_engine_common::error; pub mod engine; -pub mod functions; pub mod logger; pub mod migrations; From 7cc19325890954008d6e8c52e2790288e81a0b62 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Wed, 20 Mar 2024 20:07:18 +0100 Subject: [PATCH 16/45] Remove comments --- query-engine/query-engine-c-abi/src/engine.rs | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index d646134ca67e..01a3df2719fc 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -130,8 +130,6 @@ impl QueryEngine { let mut schema = psl::validate(datamodel.into()); let config = &mut schema.configuration; - // let preview_features = config.preview_features(); - schema .diagnostics .to_result() @@ -156,8 +154,6 @@ impl QueryEngine { .validate_that_one_datasource_is_provided() .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; - // let enable_tracing = config.preview_features().contains(PreviewFeature::Tracing); - // let engine_protocol = constructor_options.engine_protocol.unwrap_or(EngineProtocol::Json); let engine_protocol = EngineProtocol::Json; let config_dir_string = get_cstr_safe(constructor_options.native.config_dir).expect("Config dir is expected"); @@ -311,15 +307,12 @@ impl QueryEngine { /// Disconnect and drop the core. Can be reconnected later with `#connect`. pub async fn disconnect(&self, trace_str: *const c_char) -> Result<(), Error> { let trace = get_cstr_safe(trace_str).expect("Trace is needed"); - // let dispatcher = self.logger.dispatcher(); + let dispatcher = self.logger.dispatcher(); - // async_panic_to_js_error(async { let span = tracing::info_span!("prisma:engine:disconnect"); let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); - // TODO: when using Node Drivers, we need to call Driver::close() here. 
- - // async { + // async move { let mut inner = self.inner.write().await; let engine = inner.as_engine()?; @@ -338,7 +331,6 @@ impl QueryEngine { // } // .instrument(span) // .await - // }) // .with_subscriber(dispatcher) // .await } @@ -392,9 +384,6 @@ impl QueryEngine { applied_migration_names.push(unapplied_migration.migration_name().to_owned()); } - //output applied migrations - dbg!(applied_migration_names); - Ok(()) } From 88ec9b1fefdc025575bd51c6f66a05b4170e2928 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Thu, 21 Mar 2024 06:53:12 +0100 Subject: [PATCH 17/45] Replace all custom errors with the common ApiError --- query-engine/query-engine-c-abi/src/engine.rs | 34 ++++++------ query-engine/query-engine-c-abi/src/lib.rs | 10 ++-- .../query-engine-c-abi/src/migrations.rs | 52 +++++++++---------- 3 files changed, 46 insertions(+), 50 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 01a3df2719fc..bd4abb715a92 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -24,8 +24,8 @@ use tokio::{ sync::RwLock, }; use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument, Span}; -use user_facing_errors::Error; +use query_engine_common::Result; use query_engine_common::{ engine::{stringify_env_values, ConnectedEngine, ConnectedEngineNative, EngineBuilder, EngineBuilderNative, Inner}, error::ApiError, @@ -87,7 +87,7 @@ fn get_cstr_safe(ptr: *const c_char) -> Option { } } -fn map_known_error(err: query_core::CoreError) -> crate::Result { +fn map_known_error(err: query_core::CoreError) -> Result { let user_error: user_facing_errors::Error = err.into(); let value = serde_json::to_string(&user_error)?; @@ -100,7 +100,7 @@ fn map_known_error(err: query_core::CoreError) -> crate::Result { // a call to connect is necessary to start executing queries impl QueryEngine { /// Parse a valid datamodel and configuration to allow connecting later on. 
- pub fn new(constructor_options: ConstructorOptions) -> Result { + pub fn new(constructor_options: ConstructorOptions) -> Result { // Create a logs closure that can be passed around and called at any time // safe scheduling should be taken care by the code/language/environment calling this C-compatible API let engine_id = get_cstr_safe(constructor_options.id).expect("engine id cannot be missing"); @@ -182,7 +182,7 @@ impl QueryEngine { }) } - pub async fn connect(&self, trace: *const c_char) -> Result<(), Error> { + pub async fn connect(&self, trace: *const c_char) -> Result<()> { if let Some(base_path) = self.base_path.as_ref() { env::set_current_dir(Path::new(&base_path)).expect("Could not change directory"); } @@ -231,7 +231,7 @@ impl QueryEngine { connector.get_connection().instrument(conn_span).await?; - crate::Result::<_>::Ok(executor) + Result::<_>::Ok(executor) }; let query_schema_span = tracing::info_span!("prisma:engine:schema"); @@ -255,7 +255,7 @@ impl QueryEngine { #[cfg(all(not(target_os = "ios"), not(target_os = "android")))] metrics: None, }, - }) as crate::Result + }) as Result } .instrument(span) .await?; @@ -269,7 +269,7 @@ impl QueryEngine { body_str: *const c_char, trace_str: *const c_char, tx_id_str: *const c_char, - ) -> Result { + ) -> Result { let dispatcher = self.logger.dispatcher(); async move { @@ -280,7 +280,7 @@ impl QueryEngine { let tx_id = get_cstr_safe(tx_id_str); let trace = get_cstr_safe(trace_str).expect("Trace is needed"); - let query = RequestBody::try_from_str(&body, engine.engine_protocol()).map_err(Error::from)?; + let query = RequestBody::try_from_str(&body, engine.engine_protocol())?; let span = if tx_id.is_none() { tracing::info_span!("prisma:engine", user_facing = true) @@ -305,9 +305,9 @@ impl QueryEngine { } /// Disconnect and drop the core. Can be reconnected later with `#connect`. - pub async fn disconnect(&self, trace_str: *const c_char) -> Result<(), Error> { + pub async fn disconnect(&self, trace_str: *const c_char) -> Result<()> { let trace = get_cstr_safe(trace_str).expect("Trace is needed"); - let dispatcher = self.logger.dispatcher(); + // let dispatcher = self.logger.dispatcher(); let span = tracing::info_span!("prisma:engine:disconnect"); let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); @@ -335,7 +335,7 @@ impl QueryEngine { // .await } - async unsafe fn apply_migrations(&self, migration_folder_path: *const c_char) -> Result<(), Error> { + async unsafe fn apply_migrations(&self, migration_folder_path: *const c_char) -> Result<()> { if let Some(base_path) = self.base_path.as_ref() { env::set_current_dir(Path::new(&base_path)).expect("Could not change directory"); } @@ -388,7 +388,7 @@ impl QueryEngine { } /// If connected, attempts to start a transaction in the core and returns its ID. - pub async fn start_transaction(&self, input_str: *const c_char, trace_str: *const c_char) -> Result { + pub async fn start_transaction(&self, input_str: *const c_char, trace_str: *const c_char) -> Result { let input = get_cstr_safe(input_str).expect("Input string missing"); let trace = get_cstr_safe(trace_str).expect("trace is required in transactions"); let inner = self.inner.read().await; @@ -417,7 +417,7 @@ impl QueryEngine { } // If connected, attempts to commit a transaction with id `tx_id` in the core. 
- pub async fn commit_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { + pub async fn commit_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing"); let inner = self.inner.read().await; let engine = inner.as_engine()?; @@ -435,7 +435,7 @@ impl QueryEngine { } // If connected, attempts to roll back a transaction with id `tx_id` in the core. - pub async fn rollback_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { + pub async fn rollback_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result { let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing"); // let trace = get_cstr_safe(trace_str).expect("trace is required in transactions"); let inner = self.inner.read().await; @@ -510,7 +510,7 @@ pub unsafe extern "C" fn prisma_connect( Err(err) => { RUNTIME.block_on(async { let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.message()).unwrap(); + *error_string = CString::new(err.to_string()).unwrap(); *error_string_ptr = error_string.as_ptr() as *mut c_char; }); std::mem::forget(query_engine); @@ -541,7 +541,7 @@ pub unsafe extern "C" fn prisma_query( Err(err) => { RUNTIME.block_on(async { let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.message()).unwrap(); + *error_string = CString::new(err.to_string()).unwrap(); *error_string_ptr = Box::into_raw(Box::new(error_string.as_ptr())) as *mut c_char; }); std::mem::forget(query_engine); @@ -643,7 +643,7 @@ pub unsafe extern "C" fn prisma_apply_pending_migrations( Err(err) => { RUNTIME.block_on(async { let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.message()).unwrap(); + *error_string = CString::new(err.to_string()).unwrap(); *error_string_ptr = error_string.as_ptr() as *mut c_char; }); std::mem::forget(query_engine); diff --git a/query-engine/query-engine-c-abi/src/lib.rs b/query-engine/query-engine-c-abi/src/lib.rs index e2a2f0d43ddc..f41e48fdfad0 100644 --- a/query-engine/query-engine-c-abi/src/lib.rs +++ b/query-engine/query-engine-c-abi/src/lib.rs @@ -1,9 +1,5 @@ -use query_engine_common::error; - -pub mod engine; -pub mod logger; -pub mod migrations; +mod engine; +mod logger; +mod migrations; mod tracer; - -pub(crate) type Result = std::result::Result; diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs index ffba8e2b39ee..395e64486828 100644 --- a/query-engine/query-engine-c-abi/src/migrations.rs +++ b/query-engine/query-engine-c-abi/src/migrations.rs @@ -1,10 +1,11 @@ use indoc::indoc; +use query_engine_common::error::ApiError; +use query_engine_common::Result; use rusqlite::Connection; use std::{ fs::{read_dir, DirEntry}, path::{Path, PathBuf}, }; -use user_facing_errors::{Error, UnknownError}; pub type Timestamp = chrono::DateTime; @@ -17,12 +18,6 @@ pub struct MigrationDirectory { } impl MigrationDirectory { - /// Initialize a MigrationDirectory at the provided path. This will not - /// validate that the path is valid and exists. - pub fn new(path: PathBuf) -> MigrationDirectory { - MigrationDirectory { path } - } - /// The `{timestamp}_{name}` formatted migration name. pub fn migration_name(&self) -> &str { self.path @@ -33,9 +28,9 @@ impl MigrationDirectory { } /// Read the migration script to a string. 
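The helpers reworked in this file compose into a simple apply loop. A condensed sketch using only the signatures as they stand after this patch (the real apply_migrations in engine.rs additionally handles tracing, the sandboxed base path, and, later in the series, failed-migration detection):

    use std::path::Path;

    fn apply_all(db_path: &Path, migrations_dir: &Path) -> Result<()> {
        let on_disk = list_migration_dir(migrations_dir)?;
        let in_db = list_migrations(db_path)?;

        for migration in on_disk {
            let already_applied = in_db
                .iter()
                .any(|record| record.migration_name == migration.migration_name());
            if !already_applied {
                let script = migration.read_migration_script()?;
                record_migration_started(db_path, migration.migration_name())?;
                execute_migration_script(db_path, migration.migration_name(), &script)?;
            }
        }
        Ok(())
    }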
- pub fn read_migration_script(&self) -> Result { + pub fn read_migration_script(&self) -> Result { let path = self.path.join("migration.sql"); - std::fs::read_to_string(path).map_err(|ioerr| UnknownError::new(&ioerr).into()) + std::fs::read_to_string(path).map_err(|err| ApiError::Configuration(err.to_string())) } } @@ -59,19 +54,22 @@ pub struct MigrationRecord { pub started_at: Timestamp, } -pub fn list_migration_dir(migrations_directory_path: &Path) -> Result, Error> { +pub fn list_migration_dir(migrations_directory_path: &Path) -> Result> { let mut entries: Vec = Vec::new(); let read_dir_entries = match read_dir(migrations_directory_path) { Ok(read_dir_entries) => read_dir_entries, - // Err(err) if matches!(err.kind(), std::io::ErrorKind::NotFound) => return Ok(entries), - Err(err) => return Err(UnknownError::new(&err).into()), + Err(err) => return Err(ApiError::Configuration(err.to_string())), }; for entry in read_dir_entries { - let entry = entry.map_err(|err| UnknownError::new(&err))?; + let entry = entry.map_err(|err| ApiError::Configuration(err.to_string()))?; - if entry.file_type().map_err(|err| UnknownError::new(&err))?.is_dir() { + if entry + .file_type() + .map_err(|err| ApiError::Configuration(err.to_string()))? + .is_dir() + { entries.push(entry.into()); } } @@ -121,8 +119,8 @@ pub fn list_migration_dir(migrations_directory_path: &Path) -> Result Result, Error> { - let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; +pub fn list_migrations(database_filename: &Path) -> Result> { + let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?; // Check if the migrations table exists let table_exists = conn @@ -141,13 +139,14 @@ pub fn list_migrations(database_filename: &Path) -> Result, ); "#}; - conn.execute(sql, []).map_err(|err| UnknownError::new(&err))?; + conn.execute(sql, []) + .map_err(|err| ApiError::Configuration(err.to_string()))?; } let mut stmt = conn .prepare("SELECT id, migration_name, started_at, finished_at FROM _prisma_migrations") - .map_err(|err| UnknownError::new(&err))?; - let mut rows = stmt.query([]).map_err(|err| UnknownError::new(&err))?; + .map_err(|err| ApiError::Configuration(err.to_string()))?; + let mut rows = stmt.query([]).map_err(|err| ApiError::Configuration(err.to_string()))?; let mut entries: Vec = Vec::new(); @@ -168,24 +167,25 @@ pub fn list_migrations(database_filename: &Path) -> Result, Ok(entries) } -pub fn record_migration_started(database_filename: &Path, migration_name: &str) -> Result<(), Error> { - let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; +pub fn record_migration_started(database_filename: &Path, migration_name: &str) -> Result<()> { + let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?; let sql = "INSERT INTO _prisma_migrations (id, migration_name) VALUES (?, ?)"; conn.execute(sql, [uuid::Uuid::new_v4().to_string(), migration_name.to_owned()]) - .map_err(|err| UnknownError::new(&err))?; + .map_err(|err| ApiError::Configuration(err.to_string()))?; Ok(()) } -pub fn execute_migration_script(database_filename: &Path, migration_name: &str, script: &str) -> Result<(), Error> { - let conn = Connection::open(database_filename).map_err(|err| UnknownError::new(&err))?; +pub fn execute_migration_script(database_filename: &Path, migration_name: &str, script: &str) -> Result<()> { + let conn = Connection::open(database_filename).map_err(|err| 
ApiError::Configuration(err.to_string()))?; - conn.execute_batch(script).map_err(|err| UnknownError::new(&err))?; + conn.execute_batch(script) + .map_err(|err| ApiError::Configuration(err.to_string()))?; let sql = "UPDATE _prisma_migrations SET finished_at = current_timestamp WHERE migration_name = ?"; conn.execute(sql, [migration_name]) - .map_err(|err| UnknownError::new(&err))?; + .map_err(|err| ApiError::Configuration(err.to_string()))?; Ok(()) } From b69f07cf632f9018d7fd2d32d83f19b801d0821a Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Thu, 21 Mar 2024 07:02:11 +0100 Subject: [PATCH 18/45] Remove hanging TODO --- schema-engine/core/src/state.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index 224981af9e0b..9143ef1fb767 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -41,7 +41,6 @@ type ErasedConnectorRequest = Box< >; impl EngineState { - /// TODO pub(crate) fn new(initial_datamodel: Option, host: Option>) -> Self { EngineState { initial_datamodel: initial_datamodel.map(|s| psl::validate(s.into())), From 3a7a7d645985bc61894f4605022b9c311aa47754 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Thu, 21 Mar 2024 08:05:14 +0100 Subject: [PATCH 19/45] Remove unnecessary from serde error --- libs/user-facing-errors/src/lib.rs | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/libs/user-facing-errors/src/lib.rs b/libs/user-facing-errors/src/lib.rs index 03cb0438e5b3..7d7856831637 100644 --- a/libs/user-facing-errors/src/lib.rs +++ b/libs/user-facing-errors/src/lib.rs @@ -216,16 +216,3 @@ impl From for Error { } } } - -impl From for Error { - fn from(serde_error: serde_json::Error) -> Self { - Error { - is_panic: false, - inner: ErrorType::Unknown(UnknownError { - message: format!("{}", serde_error), - backtrace: None, - }), - batch_request_idx: None, - } - } -} From 1f02632868c6dc155548d4f0ee18ad7f4c4ea2a8 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Thu, 21 Mar 2024 12:20:40 +0100 Subject: [PATCH 20/45] Add failed at field to migrations table and throw on failed migration --- query-engine/query-engine-c-abi/src/engine.rs | 6 +- .../query-engine-c-abi/src/migrations.rs | 90 +++++++++---------- 2 files changed, 45 insertions(+), 51 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index bd4abb715a92..5396d8527d86 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -1,7 +1,8 @@ use crate::{ logger::Logger, migrations::{ - execute_migration_script, list_migration_dir, list_migrations, record_migration_started, MigrationDirectory, + detect_failed_migrations, execute_migration_script, list_migration_dir, list_migrations, + record_migration_started, MigrationDirectory, }, }; use once_cell::sync::Lazy; @@ -369,8 +370,7 @@ impl QueryEngine { }) .collect(); - // TODO enable this later - // detect_failed_migrations(&migrations_from_database)?; + detect_failed_migrations(&migrations_from_database)?; let mut applied_migration_names: Vec = Vec::with_capacity(unapplied_migrations.len()); diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs index 395e64486828..2edc2bd5226b 100644 --- a/query-engine/query-engine-c-abi/src/migrations.rs +++ b/query-engine/query-engine-c-abi/src/migrations.rs @@ -52,6 +52,8 @@ pub struct MigrationRecord { pub migration_name: String, /// The time the 
migration started being applied. pub started_at: Timestamp, + /// The time the migration failed + pub failed_at: Option, } pub fn list_migration_dir(migrations_directory_path: &Path) -> Result> { @@ -79,45 +81,26 @@ pub fn list_migration_dir(migrations_directory_path: &Path) -> Result Result<(), user_facing_errors::Error> { -// use std::fmt::Write as _; - -// tracing::debug!("Checking for failed migrations."); - -// let mut failed_migrations = migrations_from_database -// .iter() -// .filter(|migration| migration.finished_at.is_none() && migration.rolled_back_at.is_none()) -// .peekable(); - -// if failed_migrations.peek().is_none() { -// return Ok(()); -// } - -// let mut details = String::new(); - -// for failed_migration in failed_migrations { -// let logs = failed_migration -// .logs -// .as_deref() -// .map(|s| s.trim()) -// .filter(|s| !s.is_empty()) -// .map(|s| format!(" with the following logs:\n{s}")) -// .unwrap_or_default(); - -// writeln!( -// details, -// "The `{name}` migration started at {started_at} failed{logs}", -// name = failed_migration.migration_name, -// started_at = failed_migration.started_at, -// ) -// .unwrap(); -// } - -// // Err(user_facing(FoundFailedMigrations { details })) -// Err(user_facing_errors::Error::from( -// user_facing_errors::common::FoundFailedMigrations { details }, -// )) -// } +pub fn detect_failed_migrations(migrations_from_database: &[MigrationRecord]) -> Result<()> { + tracing::debug!("Checking for failed migrations."); + + let mut failed_migrations = migrations_from_database + .iter() + .filter(|migration| migration.finished_at.is_none() && migration.failed_at.is_none()) + .peekable(); + + if failed_migrations.peek().is_none() { + return Ok(()); + } else { + Err(ApiError::Configuration( + format!( + "Failed migration detected: {}", + failed_migrations.peek().unwrap().migration_name + ) + .to_string(), + )) + } +} pub fn list_migrations(database_filename: &Path) -> Result> { let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?; @@ -135,7 +118,8 @@ pub fn list_migrations(database_filename: &Path) -> Result> "id" TEXT PRIMARY KEY NOT NULL, "finished_at" DATETIME, "migration_name" TEXT NOT NULL, - "started_at" DATETIME NOT NULL DEFAULT current_timestamp + "started_at" DATETIME NOT NULL DEFAULT current_timestamp, + "failed_at" DATETIME ); "#}; @@ -144,7 +128,7 @@ pub fn list_migrations(database_filename: &Path) -> Result> } let mut stmt = conn - .prepare("SELECT id, migration_name, started_at, finished_at FROM _prisma_migrations") + .prepare("SELECT id, migration_name, started_at, finished_at, failed_at FROM _prisma_migrations") .map_err(|err| ApiError::Configuration(err.to_string()))?; let mut rows = stmt.query([]).map_err(|err| ApiError::Configuration(err.to_string()))?; @@ -155,12 +139,14 @@ pub fn list_migrations(database_filename: &Path) -> Result> let migration_name: String = row.get(1).unwrap(); let started_at: Timestamp = row.get(2).unwrap(); let finished_at: Option = row.get(3).unwrap(); + let failed_at: Option = row.get(4).unwrap(); entries.push(MigrationRecord { id, migration_name, started_at, finished_at, + failed_at, }); } @@ -180,12 +166,20 @@ pub fn record_migration_started(database_filename: &Path, migration_name: &str) pub fn execute_migration_script(database_filename: &Path, migration_name: &str, script: &str) -> Result<()> { let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?; - conn.execute_batch(script) - .map_err(|err| 
ApiError::Configuration(err.to_string()))?; - - let sql = "UPDATE _prisma_migrations SET finished_at = current_timestamp WHERE migration_name = ?"; - conn.execute(sql, [migration_name]) - .map_err(|err| ApiError::Configuration(err.to_string()))?; + let migration_result = conn.execute_batch(script); - Ok(()) + match migration_result { + Ok(_) => { + let sql = "UPDATE _prisma_migrations SET finished_at = current_timestamp WHERE migration_name = ?"; + conn.execute(sql, [migration_name]) + .map_err(|err| ApiError::Configuration(err.to_string()))?; + Ok(()) + } + Err(err) => { + let sql = "UPDATE _prisma_migrations SET failed_at = current_timestamp WHERE migration_name = ?"; + conn.execute(sql, [migration_name]) + .map_err(|err| ApiError::Configuration(err.to_string()))?; + Err(ApiError::Configuration(err.to_string())) + } + } } From 43cd408d65293d4ef3e5d61900e270d34b19354e Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 22 Mar 2024 09:02:32 +0100 Subject: [PATCH 21/45] Update query-engine/query-engine-c-abi/src/engine.rs Co-authored-by: Flavian Desverne --- query-engine/query-engine-c-abi/src/engine.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 5396d8527d86..141c9754c10d 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -464,7 +464,7 @@ impl QueryEngine { // This API is meant to be stateless. This means the box pointer to the query engine structure will be returned to the // calling code and should be passed to subsequent calls // -// Be should be careful about not de-allocating the pointer +// We should be careful about not de-allocating the pointer // when adding a new function remember to always call mem::forget /// # Safety From d607828bca458257eb59eda82bc2743e4542ee92 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 22 Mar 2024 14:17:21 +0100 Subject: [PATCH 22/45] Update react native tests to take real emulator url --- .../query-tests-setup/src/config.rs | 19 +++++++++++++++++++ .../src/connector_tag/js/external_process.rs | 3 ++- .../test-configs/react-native | 3 ++- .../driver-adapters/executor/src/rn.ts | 15 ++++++--------- .../driver-adapters/executor/src/testd.ts | 3 +++ 5 files changed, 32 insertions(+), 11 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index af504a5a4da3..0316df48f3b1 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -45,6 +45,11 @@ pub struct TestConfig { /// Env key: `EXTERNAL_TEST_EXECUTOR` pub(crate) external_test_executor: Option, + // For mobile tests a running device with a valid http server is required. + // This is the URL to the mobile emulator which will execute the queries against + // the instances of the engine running on the device. 
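The comment above describes the setup: engine instances run on the device behind a small HTTP server, and the test executor talks to that server through the emulator URL. The endpoints used by rn.ts in this series are /connect, /query, /start_transaction, /commit_transaction, /rollback_transaction and /disconnect. A rough illustration of posting to such a server (assumes reqwest with the blocking feature; the payload shape here is an assumption, not part of this patch):

    // Illustration only; endpoint paths come from rn.ts, the payload does not.
    fn post(base_url: &str, path: &str, payload: String) -> reqwest::Result<String> {
        reqwest::blocking::Client::new()
            .post(format!("{base_url}{path}"))
            .header("Content-Type", "application/json")
            .body(payload)
            .send()?
            .text()
    }

    // e.g. post("http://localhost:3000", "/connect", "{}".to_string())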
+ pub(crate) mobile_emulator_url: Option, + /// The driver adapter to use when running tests, will be forwarded to the external test /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process pub(crate) driver_adapter: Option, @@ -88,6 +93,7 @@ And optionally, to test driver adapters - EXTERNAL_TEST_EXECUTOR - DRIVER_ADAPTER - DRIVER_ADAPTER_CONFIG (optional, not required by all driver adapters) +- MOBILE_EMULATOR_URL (optional, only required by mobile external test executor) 📁 Config file @@ -149,6 +155,8 @@ impl TestConfig { .map(|config| serde_json::from_str::(config.as_str()).ok()) .unwrap_or_default(); + let mobile_emulator_url = std::env::var("MOBILE_EMULATOR_URL").ok(); + // Just care for a set value for now. let is_ci = std::env::var("BUILDKITE").is_ok(); @@ -159,6 +167,7 @@ impl TestConfig { external_test_executor, driver_adapter, driver_adapter_config, + mobile_emulator_url, }) } @@ -246,6 +255,12 @@ impl TestConfig { } if let Some(external_test_executor) = &self.external_test_executor { + if *external_test_executor == TestExecutor::Mobile && self.mobile_emulator_url.is_none() { + exit_with_message( + "When using the mobile external test executor, the mobile emulator URL (MOBILE_EMULATOR_URL env var) must be set.", + ); + } + if *external_test_executor != TestExecutor::Mobile && self.driver_adapter.is_none() { exit_with_message( "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", @@ -324,6 +339,10 @@ impl TestConfig { "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), "1".to_string(), ), + ( + "MOBILE_EMULATOR_URL".to_string(), + self.mobile_emulator_url.clone().unwrap_or_default() + ), ) } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 06d1551f9405..9db9556137f4 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -196,8 +196,9 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { .build() .unwrap() .block_on(async move { + let environment = CONFIG.for_external_executor(); let process = match Command::new(&path) - .envs(CONFIG.for_external_executor()) + .envs(environment) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) diff --git a/query-engine/connector-test-kit-rs/test-configs/react-native b/query-engine/connector-test-kit-rs/test-configs/react-native index a26c5a358802..858347a7e6a4 100644 --- a/query-engine/connector-test-kit-rs/test-configs/react-native +++ b/query-engine/connector-test-kit-rs/test-configs/react-native @@ -1,5 +1,6 @@ { "connector": "sqlite", "version": "react-native", - "external_test_executor": "Mobile" + "external_test_executor": "Mobile", + "mobile_emulator_url": "http://localhost:3000" } diff --git a/query-engine/driver-adapters/executor/src/rn.ts b/query-engine/driver-adapters/executor/src/rn.ts index ad451283e0fc..d3c62b3e40e7 100644 --- a/query-engine/driver-adapters/executor/src/rn.ts +++ b/query-engine/driver-adapters/executor/src/rn.ts @@ -3,9 +3,6 @@ export function createRNEngineConnector( schema: string, logCallback: (msg: string) => void ) { - const port = "3000"; - const baseIP = "192.168.178.20"; - const deviceUrl = `http://${baseIP}:${port}`; const headers = { "Content-Type": "application/json", Accept: 
"application/json", @@ -13,7 +10,7 @@ export function createRNEngineConnector( return { connect: async () => { - const res = await fetch(`${deviceUrl}/connect`, { + const res = await fetch(`${url}/connect`, { method: "POST", mode: "no-cors", headers, @@ -27,7 +24,7 @@ export function createRNEngineConnector( trace: string, txId: string ): Promise => { - const res = await fetch(`${deviceUrl}/query`, { + const res = await fetch(`${url}/query`, { method: "POST", mode: "no-cors", headers, @@ -47,7 +44,7 @@ export function createRNEngineConnector( return response.engineResponse; }, startTransaction: async (body: string, trace: string): Promise => { - const res = await fetch(`${deviceUrl}/start_transaction`, { + const res = await fetch(`${url}/start_transaction`, { method: "POST", mode: "no-cors", headers, @@ -59,7 +56,7 @@ export function createRNEngineConnector( return await res.json(); }, commitTransaction: async (txId: string, trace: string): Promise => { - const res = await fetch(`${deviceUrl}/commit_transaction`, { + const res = await fetch(`${url}/commit_transaction`, { method: "POST", mode: "no-cors", headers, @@ -74,7 +71,7 @@ export function createRNEngineConnector( txId: string, trace: string ): Promise => { - const res = await fetch(`${deviceUrl}/rollback_transaction`, { + const res = await fetch(`${url}/rollback_transaction`, { method: "POST", mode: "no-cors", headers, @@ -86,7 +83,7 @@ export function createRNEngineConnector( return res.json(); }, disconnect: async (trace: string) => { - await fetch(`${deviceUrl}/disconnect`, { + await fetch(`${url}/disconnect`, { method: "POST", mode: "no-cors", headers, diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index cc9a3de3cf4c..1a9ffa208e2c 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -220,6 +220,9 @@ function respondOk(requestId: number, payload: unknown) { async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[qe.QueryEngine, ErrorCapturingDriverAdapter]> { if(process.env.EXTERNAL_TEST_EXECUTOR === "Mobile") { + if(process.env.MOBILE_EMULATOR_URL) { + url = process.env.MOBILE_EMULATOR_URL + } const engineInstance = createRNEngineConnector(url, prismaSchema, logCallback) return [engineInstance, {} as any]; } else { From 3e5f92e9897b65f775aa956c35cf8216d0bce9d9 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 22 Mar 2024 14:22:57 +0100 Subject: [PATCH 23/45] Return null adapter on RN case and handle null case --- query-engine/driver-adapters/executor/src/testd.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index 1a9ffa208e2c..e15ad08a88ca 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -79,7 +79,7 @@ async function main(): Promise { const state: Record = {} @@ -97,6 +97,7 @@ async function handleRequest(method: string, params: unknown): Promise const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { logs.push(log) }); + await engine.connect("") state[castParams.schemaId] = { @@ -123,7 +124,7 @@ async function handleRequest(method: string, params: unknown): Promise if (parsedResult.errors) { const error = parsedResult.errors[0]?.user_facing_error if (error.error_code === 'P2036') { - const jsError = 
state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id) + const jsError = state[castParams.schemaId].adapter?.errorRegistry.consumeError(error.meta.id) if (!jsError) { err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) } else { @@ -218,13 +219,13 @@ function respondOk(requestId: number, payload: unknown) { console.log(JSON.stringify(msg)) } -async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[qe.QueryEngine, ErrorCapturingDriverAdapter]> { +async function initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[qe.QueryEngine, ErrorCapturingDriverAdapter | null]> { if(process.env.EXTERNAL_TEST_EXECUTOR === "Mobile") { if(process.env.MOBILE_EMULATOR_URL) { url = process.env.MOBILE_EMULATOR_URL } const engineInstance = createRNEngineConnector(url, prismaSchema, logCallback) - return [engineInstance, {} as any]; + return [engineInstance, null]; } else { const engineType = process.env.EXTERNAL_TEST_EXECUTOR === "Wasm" ? "Wasm" : "Napi"; const adapter = await adapterFromEnv(url) as DriverAdapter From f283addb81e0eff77ac58a144460575775198f35 Mon Sep 17 00:00:00 2001 From: Sergey Tatarintsev Date: Fri, 22 Mar 2024 14:43:45 +0100 Subject: [PATCH 24/45] Fix tests --- .github/workflows/codspeed.yml | 2 +- query-engine/request-handlers/Cargo.toml | 5 ++++- query-engine/request-handlers/src/load_executor.rs | 1 + 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml index 96263f590079..285c8c9fbd54 100644 --- a/.github/workflows/codspeed.yml +++ b/.github/workflows/codspeed.yml @@ -31,7 +31,7 @@ jobs: run: cargo codspeed build -p schema --features all_connectors - name: "Build the benchmark targets: request-handlers" - run: cargo codspeed build -p request-handlers --features native + run: cargo codspeed build -p request-handlers --features native,all - name: Run the benchmarks uses: CodSpeedHQ/action@v2 diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 28161f7f33a7..870ab991ad21 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -42,7 +42,10 @@ driver-adapters = ["sql-query-connector/driver-adapters"] native = ["sql-query-connector/native"] all = [ "mongodb", - "sql", + "mysql", + "sqlite", + "postgresql", + "cockroachdb", "graphql-protocol", "sql-query-connector/all", "psl/all", diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index ded539fead50..1a74ef03002d 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -80,6 +80,7 @@ mod native { use super::*; use tracing::trace; + #[cfg(feature = "sqlite")] pub(crate) async fn sqlite( source: &Datasource, url: &str, From 109074968c5563d74ed4c9e22bf0a58f10099550 Mon Sep 17 00:00:00 2001 From: Sergey Tatarintsev Date: Fri, 22 Mar 2024 15:33:40 +0100 Subject: [PATCH 25/45] Fix mssql & cockroach --- query-engine/request-handlers/Cargo.toml | 2 ++ query-engine/request-handlers/src/load_executor.rs | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml index 870ab991ad21..802d72344f7e 100644 --- a/query-engine/request-handlers/Cargo.toml +++ b/query-engine/request-handlers/Cargo.toml @@ -38,6 +38,7 
@@ postgresql = ["sql", "sql-query-connector/postgresql", "psl/postgresql"] mysql = ["sql", "sql-query-connector/mysql", "psl/mysql"] sqlite = ["sql", "sql-query-connector/sqlite", "psl/sqlite"] cockroachdb = ["sql", "sql-query-connector/postgresql", "psl/cockroachdb"] +mssql = ["sql", "sql-query-connector/mssql", "psl/mssql"] driver-adapters = ["sql-query-connector/driver-adapters"] native = ["sql-query-connector/native"] all = [ @@ -46,6 +47,7 @@ all = [ "sqlite", "postgresql", "cockroachdb", + "mssql", "graphql-protocol", "sql-query-connector/all", "psl/all", diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs index 1a74ef03002d..0a289cd80adc 100644 --- a/query-engine/request-handlers/src/load_executor.rs +++ b/query-engine/request-handlers/src/load_executor.rs @@ -51,7 +51,7 @@ pub async fn load( p if POSTGRES.is_provider(p) => native::postgres(datasource, &url, features).await, #[cfg(feature = "mssql")] p if MSSQL.is_provider(p) => native::mssql(datasource, &url, features).await, - #[cfg(feature = "cockroach")] + #[cfg(feature = "cockroachdb")] p if COCKROACH.is_provider(p) => native::postgres(datasource, &url, features).await, #[cfg(feature = "mongodb")] p if MONGODB.is_provider(p) => native::mongodb(datasource, &url, features).await, From 5e3ec08e0f2b21256f9d3d6b878c8b8cea490d6f Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 22 Mar 2024 16:31:34 +0100 Subject: [PATCH 26/45] Pass raw error pointer to C context and resign de allocation to calling function --- query-engine/query-engine-c-abi/src/engine.rs | 45 +++++++++---------- 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 141c9754c10d..d4817414fb3f 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -49,8 +49,6 @@ pub static PRISMA_MISSING_POINTER: i32 = 2; /// This struct holds an instance of the prisma query engine /// You can instanciate as many as you want pub struct QueryEngine { - // This String pointer will be pointed to by passed error pointer and will allow calling code to have a bit more information when something goes wrong - error_string: RwLock, inner: RwLock, base_path: Option, logger: Logger, @@ -68,8 +66,7 @@ pub struct ConstructorOptionsNative { pub struct ConstructorOptions { id: *const c_char, datamodel: *const c_char, - // Used on iOS/Android to navigate to the sandboxed app folder to execute all file operations - base_path: *const c_char, + base_path: *const c_char, // Used on iOS/Android to navigate to the sandboxed app folder to execute all file operations log_level: *const c_char, log_queries: bool, datasource_overrides: *const c_char, @@ -176,7 +173,6 @@ impl QueryEngine { ); Ok(Self { - error_string: RwLock::new(CString::new("").unwrap()), inner: RwLock::new(Inner::Builder(builder)), base_path, logger, @@ -468,10 +464,14 @@ impl QueryEngine { // when adding a new function remember to always call mem::forget /// # Safety -/// /// The calling context needs to pass a valid pointer that will store the reference +/// The calling context also need to clear the pointer of the error string if it is not null #[no_mangle] -pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut *mut QueryEngine) -> c_int { +pub unsafe extern "C" fn prisma_create( + options: ConstructorOptions, + qe_ptr: *mut *mut QueryEngine, + error_string_ptr: *mut *mut 
c_char, +) -> c_int { if qe_ptr.is_null() { return PRISMA_MISSING_POINTER; } @@ -482,8 +482,9 @@ pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut *qe_ptr = Box::into_raw(Box::new(v)); PRISMA_OK } - Err(e) => { - println!("Error creating query engine {:#?}", e); + Err(err) => { + let error_string = CString::new(err.to_string()).unwrap(); + *error_string_ptr = error_string.into_raw() as *mut c_char; PRISMA_UNKNOWN_ERROR } } @@ -492,6 +493,7 @@ pub unsafe extern "C" fn prisma_create(options: ConstructorOptions, qe_ptr: *mut /// # Safety /// /// The calling context needs to pass a valid pointer that will store the reference to the error string +/// The calling context also need to clear the pointer of the error string if it is not null #[no_mangle] pub unsafe extern "C" fn prisma_connect( qe: *mut QueryEngine, @@ -508,11 +510,8 @@ pub unsafe extern "C" fn prisma_connect( PRISMA_OK } Err(err) => { - RUNTIME.block_on(async { - let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.to_string()).unwrap(); - *error_string_ptr = error_string.as_ptr() as *mut c_char; - }); + let error_string = CString::new(err.to_string()).unwrap(); + *error_string_ptr = error_string.into_raw() as *mut c_char; std::mem::forget(query_engine); PRISMA_UNKNOWN_ERROR } @@ -522,6 +521,7 @@ pub unsafe extern "C" fn prisma_connect( /// # Safety /// /// The calling context needs to pass a valid pointer that will store the reference to the error string +/// The calling context also need to clear the pointer of the error string if it is not null #[no_mangle] pub unsafe extern "C" fn prisma_query( qe: *mut QueryEngine, @@ -539,11 +539,9 @@ pub unsafe extern "C" fn prisma_query( CString::new(query_result).unwrap().into_raw() } Err(err) => { - RUNTIME.block_on(async { - let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.to_string()).unwrap(); - *error_string_ptr = Box::into_raw(Box::new(error_string.as_ptr())) as *mut c_char; - }); + let error_string = CString::new(err.to_string()).unwrap(); + *error_string_ptr = error_string.into_raw() as *mut c_char; + std::mem::forget(query_engine); null_mut() } @@ -553,6 +551,7 @@ pub unsafe extern "C" fn prisma_query( /// # Safety /// /// The calling context needs to pass a valid pointer that will store the reference to the error string +/// The calling context also need to clear the pointer of the error string if it is not null #[no_mangle] pub unsafe extern "C" fn prisma_start_transaction( qe: *mut QueryEngine, @@ -626,6 +625,7 @@ pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *co /// # Safety /// /// The calling context needs to pass a valid pointer that will store the reference to the error string +/// The calling context also need to clear the pointer of the error string if it is not null #[no_mangle] pub unsafe extern "C" fn prisma_apply_pending_migrations( qe: *mut QueryEngine, @@ -641,11 +641,8 @@ pub unsafe extern "C" fn prisma_apply_pending_migrations( PRISMA_OK } Err(err) => { - RUNTIME.block_on(async { - let mut error_string = query_engine.error_string.write().await; - *error_string = CString::new(err.to_string()).unwrap(); - *error_string_ptr = error_string.as_ptr() as *mut c_char; - }); + let error_string = CString::new(err.to_string()).unwrap(); + *error_string_ptr = error_string.into_raw() as *mut c_char; std::mem::forget(query_engine); PRISMA_UNKNOWN_ERROR } From 23fe69d2ee8b1b30d4bd66218845012b3d5ca2d1 Mon Sep 17 00:00:00 
2001 From: Oscar Franco Date: Fri, 22 Mar 2024 17:13:34 +0100 Subject: [PATCH 27/45] Serialize API errors on start, commit and rollback transaction --- query-engine/query-engine-c-abi/Makefile | 4 +--- query-engine/query-engine-c-abi/copy-ios.sh | 2 -- query-engine/query-engine-c-abi/src/engine.rs | 13 +++++++++---- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/query-engine/query-engine-c-abi/Makefile b/query-engine/query-engine-c-abi/Makefile index 1688ace84c08..5b9c1a7fae92 100644 --- a/query-engine/query-engine-c-abi/Makefile +++ b/query-engine/query-engine-c-abi/Makefile @@ -9,7 +9,6 @@ ARCH_IOS_SIM = aarch64-apple-ios-sim ARCHS_IOS = x86_64-apple-ios aarch64-apple-ios aarch64-apple-ios-sim -# ARCHS_IOS = aarch64-apple-ios aarch64-apple-ios-sim ARCHS_ANDROID = aarch64-linux-android armv7-linux-androideabi x86_64-linux-android i686-linux-android LIB = libquery_engine.a XCFRAMEWORK = QueryEngine.xcframework @@ -52,8 +51,7 @@ $(ARCHS_IOS): %: cargo build --release --target $@ $(XCFRAMEWORK): $(ARCHS_IOS) -# Library is too large to be published with all the targets on NPM. Therefore we are not shipping the intel version (for older intel macs) lipo -create $(wildcard ../../target/x86_64-apple-ios/release/$(LIB)) $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -output simulator_fat/libquery_engine.a xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library simulator_fat/libquery_engine.a -headers include -output $@ - # xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -headers include -output $@ + ./copy-ios.sh \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/copy-ios.sh b/query-engine/query-engine-c-abi/copy-ios.sh index c8eb73e89126..f1d731a87929 100755 --- a/query-engine/query-engine-c-abi/copy-ios.sh +++ b/query-engine/query-engine-c-abi/copy-ios.sh @@ -7,8 +7,6 @@ TARGET_DIR=../../../react-native-prisma # This one is not actually necessary but XCode picks it up and mixes up versions cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h -rm -rf "$TARGET_DIR/QueryEngine.xcframework" - cp -R QueryEngine.xcframework "$TARGET_DIR/QueryEngine.xcframework" pingme "✅ Prisma iOS Finished" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index d4817414fb3f..a9a6464d40ab 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -92,6 +92,11 @@ fn map_known_error(err: query_core::CoreError) -> Result { Ok(value) } +fn serialize_api_error(err: ApiError) -> String { + let user_error: user_facing_errors::Error = err.into(); + serde_json::to_string(&user_error).unwrap() +} + // Struct that holds an internal prisma engine // the inner prop holds the internal state, it starts as a Builder // meaning it is not connected to the database @@ -565,9 +570,9 @@ pub unsafe extern "C" fn prisma_start_transaction( std::mem::forget(query_engine); CString::new(query_result).unwrap().into_raw() } - Err(_err) => { + Err(err) => { std::mem::forget(query_engine); - null_mut() + CString::new(serialize_api_error(err)).unwrap().into_raw() } } } @@ -586,7 +591,7 @@ pub unsafe extern "C" fn prisma_commit_transaction( std::mem::forget(query_engine); match result { Ok(query_result) => 
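The safety comments in this patch state that the calling context must clear the error string when it is not null; with CString::into_raw the Rust side forgets the allocation, so the caller has to hand it back exactly once. A hypothetical sketch of what that reclaim hook could look like on the Rust side (no such function exists in this patch; it only illustrates the contract):

    /// # Safety
    /// `ptr` must have been obtained from this library via CString::into_raw
    /// and must not be used again after this call.
    #[no_mangle]
    pub unsafe extern "C" fn prisma_free_string(ptr: *mut c_char) {
        // Hypothetical helper, not part of this patch: re-owning the CString
        // lets Rust free the buffer that was handed out earlier.
        if !ptr.is_null() {
            drop(CString::from_raw(ptr));
        }
    }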
CString::new(query_result).unwrap().into_raw(), - Err(_err) => null_mut(), + Err(err) => CString::new(serialize_api_error(err)).unwrap().into_raw(), } } @@ -604,7 +609,7 @@ pub unsafe extern "C" fn prisma_rollback_transaction( std::mem::forget(query_engine); match result { Ok(query_result) => CString::new(query_result).unwrap().into_raw(), - Err(_err) => null_mut(), + Err(err) => CString::new(serialize_api_error(err)).unwrap().into_raw(), } } From 15d11aaaafc33f6516faaabfdc4b3c1944bb175b Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 22 Mar 2024 17:27:43 +0100 Subject: [PATCH 28/45] Re introduce dispatcher and spans --- query-engine/query-engine-c-abi/src/engine.rs | 116 +++++++++--------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index a9a6464d40ab..e36397e6447c 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -309,32 +309,33 @@ impl QueryEngine { /// Disconnect and drop the core. Can be reconnected later with `#connect`. pub async fn disconnect(&self, trace_str: *const c_char) -> Result<()> { let trace = get_cstr_safe(trace_str).expect("Trace is needed"); - // let dispatcher = self.logger.dispatcher(); - - let span = tracing::info_span!("prisma:engine:disconnect"); - let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); - - // async move { - let mut inner = self.inner.write().await; - let engine = inner.as_engine()?; - - let builder = EngineBuilder { - schema: engine.schema.clone(), - engine_protocol: engine.engine_protocol(), - native: EngineBuilderNative { - config_dir: engine.native.config_dir.clone(), - env: engine.native.env.clone(), - }, - }; + let dispatcher = self.logger.dispatcher(); + async { + let span = tracing::info_span!("prisma:engine:disconnect"); + let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + async { + let mut inner = self.inner.write().await; + let engine = inner.as_engine()?; + + let builder = EngineBuilder { + schema: engine.schema.clone(), + engine_protocol: engine.engine_protocol(), + native: EngineBuilderNative { + config_dir: engine.native.config_dir.clone(), + env: engine.native.env.clone(), + }, + }; - *inner = Inner::Builder(builder); + *inner = Inner::Builder(builder); - Ok(()) - // } - // .instrument(span) - // .await - // .with_subscriber(dispatcher) - // .await + Ok(()) + } + .instrument(span) + .await + } + .with_subscriber(dispatcher) + .await } async unsafe fn apply_migrations(&self, migration_folder_path: *const c_char) -> Result<()> { @@ -395,26 +396,25 @@ impl QueryEngine { let inner = self.inner.read().await; let engine = inner.as_engine()?; - // TODO(osp) check with team about this dispatcher - // let dispatcher = self.logger.dispatcher(); - - // async move { - let span = tracing::info_span!("prisma:engine:itx_runner", user_facing = true, itx_id = field::Empty); - telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + let dispatcher = self.logger.dispatcher(); - let tx_opts: TransactionOptions = serde_json::from_str(&input)?; - match engine - .executor() - .start_tx(engine.query_schema().clone(), engine.engine_protocol(), tx_opts) - .instrument(span) - .await - { - Ok(tx_id) => Ok(json!({ "id": tx_id.to_string() }).to_string()), - Err(err) => Ok(map_known_error(err)?), + async move { + let span = tracing::info_span!("prisma:engine:itx_runner", user_facing = true, itx_id = field::Empty); + 
telemetry::helpers::set_parent_context_from_json_str(&span, &trace); + + let tx_opts: TransactionOptions = serde_json::from_str(&input)?; + match engine + .executor() + .start_tx(engine.query_schema().clone(), engine.engine_protocol(), tx_opts) + .instrument(span) + .await + { + Ok(tx_id) => Ok(json!({ "id": tx_id.to_string() }).to_string()), + Err(err) => Ok(map_known_error(err)?), + } } - // } - // .with_subscriber(dispatcher) - // .await + .with_subscriber(dispatcher) + .await } // If connected, attempts to commit a transaction with id `tx_id` in the core. @@ -423,16 +423,16 @@ impl QueryEngine { let inner = self.inner.read().await; let engine = inner.as_engine()?; - // let dispatcher = self.logger.dispatcher(); + let dispatcher = self.logger.dispatcher(); - // async move { - match engine.executor().commit_tx(TxId::from(tx_id)).await { - Ok(_) => Ok("{}".to_string()), - Err(err) => Ok(map_known_error(err)?), + async move { + match engine.executor().commit_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } } - // } - // .with_subscriber(dispatcher) - // .await + .with_subscriber(dispatcher) + .await } // If connected, attempts to roll back a transaction with id `tx_id` in the core. @@ -442,16 +442,16 @@ impl QueryEngine { let inner = self.inner.read().await; let engine = inner.as_engine()?; - // let dispatcher = self.logger.dispatcher(); + let dispatcher = self.logger.dispatcher(); - // async move { - match engine.executor().rollback_tx(TxId::from(tx_id)).await { - Ok(_) => Ok("{}".to_string()), - Err(err) => Ok(map_known_error(err)?), + async move { + match engine.executor().rollback_tx(TxId::from(tx_id)).await { + Ok(_) => Ok("{}".to_string()), + Err(err) => Ok(map_known_error(err)?), + } } - // } - // .with_subscriber(dispatcher) - // .await + .with_subscriber(dispatcher) + .await } } From 8b889970a433bdfd6c36a884527bf73db4667308 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 26 Mar 2024 06:42:33 +0100 Subject: [PATCH 29/45] Add metrics feature to query-engine-common --- libs/query-engine-common/Cargo.toml | 5 +++-- libs/query-engine-common/src/engine.rs | 2 +- query-engine/query-engine-node-api/Cargo.toml | 4 +++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/libs/query-engine-common/Cargo.toml b/libs/query-engine-common/Cargo.toml index e1e83e1ffb3a..7554bcb7f067 100644 --- a/libs/query-engine-common/Cargo.toml +++ b/libs/query-engine-common/Cargo.toml @@ -3,7 +3,8 @@ name = "query-engine-common" version = "0.1.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +metrics = [] [dependencies] thiserror = "1" @@ -22,7 +23,7 @@ tracing-futures = "0.2" tracing-opentelemetry = "0.17.3" opentelemetry = { version = "0.17" } -[target.'cfg(all(not(target_arch = "wasm32"), not(target_os = "ios"), not(target_os = "android")))'.dependencies] +[target.'cfg(all(not(target_arch = "wasm32")))'.dependencies] query-engine-metrics = { path = "../../query-engine/metrics" } napi.workspace = true diff --git a/libs/query-engine-common/src/engine.rs b/libs/query-engine-common/src/engine.rs index 3e1d1cfef154..96c51584e437 100644 --- a/libs/query-engine-common/src/engine.rs +++ b/libs/query-engine-common/src/engine.rs @@ -58,7 +58,7 @@ pub struct EngineBuilder { pub struct ConnectedEngineNative { pub config_dir: PathBuf, pub env: HashMap, - #[cfg(all(not(target_os = "ios"), not(target_os = "android")))] + #[cfg(feature = "metrics")] pub metrics: 
Option, } diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml index e5233fea4c03..73abee76eaba 100644 --- a/query-engine/query-engine-node-api/Cargo.toml +++ b/query-engine/query-engine-node-api/Cargo.toml @@ -25,7 +25,9 @@ request-handlers = { path = "../request-handlers", features = [ "all", ] } query-connector = { path = "../connectors/query-connector" } -query-engine-common = { path = "../../libs/query-engine-common" } +query-engine-common = { path = "../../libs/query-engine-common", features = [ + "metrics", +] } user-facing-errors = { path = "../../libs/user-facing-errors" } psl = { workspace = true, features = ["all"] } sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector", features = [ From f4a032e0f7d7428601115c31d9cf0e7b02279a9f Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 26 Mar 2024 07:34:13 +0100 Subject: [PATCH 30/45] change base path missing message to tracing --- query-engine/query-engine-c-abi/src/engine.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index e36397e6447c..6b2f02a07e16 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -66,7 +66,9 @@ pub struct ConstructorOptionsNative { pub struct ConstructorOptions { id: *const c_char, datamodel: *const c_char, - base_path: *const c_char, // Used on iOS/Android to navigate to the sandboxed app folder to execute all file operations + // Used on iOS/Android to navigate to the sandboxed app folder to execute all file operations because file systems are sandboxed + // Take a look at README for a more detailed explanation + base_path: *const c_char, log_level: *const c_char, log_queries: bool, datasource_overrides: *const c_char, @@ -141,7 +143,7 @@ impl QueryEngine { let base_path = get_cstr_safe(constructor_options.base_path); match &base_path { Some(path) => env::set_current_dir(Path::new(&path)).expect("Could not change directory"), - _ => println!("No base path passed!"), + _ => tracing::trace!("No base path provided"), } config From e18bab50b882dd757ddbec652255d2204b355bc7 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 26 Mar 2024 15:22:11 +0100 Subject: [PATCH 31/45] Fix copy xcframework script --- query-engine/query-engine-c-abi/copy-ios.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine-c-abi/copy-ios.sh b/query-engine/query-engine-c-abi/copy-ios.sh index f1d731a87929..9a3bb43baf12 100755 --- a/query-engine/query-engine-c-abi/copy-ios.sh +++ b/query-engine/query-engine-c-abi/copy-ios.sh @@ -7,6 +7,6 @@ TARGET_DIR=../../../react-native-prisma # This one is not actually necessary but XCode picks it up and mixes up versions cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h -cp -R QueryEngine.xcframework "$TARGET_DIR/QueryEngine.xcframework" +cp -R QueryEngine.xcframework $TARGET_DIR pingme "✅ Prisma iOS Finished" \ No newline at end of file From 40fb1d6068554e7e02e2cf3d9a01e86c6d5273d2 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 26 Mar 2024 15:28:14 +0100 Subject: [PATCH 32/45] Store URL in the query engine instance --- query-engine/query-engine-c-abi/Makefile | 1 - query-engine/query-engine-c-abi/src/engine.rs | 25 +++++++++---------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/query-engine/query-engine-c-abi/Makefile 
b/query-engine/query-engine-c-abi/Makefile index 5b9c1a7fae92..83e1d506c4e4 100644 --- a/query-engine/query-engine-c-abi/Makefile +++ b/query-engine/query-engine-c-abi/Makefile @@ -53,5 +53,4 @@ $(ARCHS_IOS): %: $(XCFRAMEWORK): $(ARCHS_IOS) lipo -create $(wildcard ../../target/x86_64-apple-ios/release/$(LIB)) $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -output simulator_fat/libquery_engine.a xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library simulator_fat/libquery_engine.a -headers include -output $@ - ./copy-ios.sh \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 6b2f02a07e16..50ed0887f587 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -32,7 +32,6 @@ use query_engine_common::{ error::ApiError, }; use request_handlers::ConnectorKind; -// use tracing_subscriber::filter::LevelFilter; // The query engine code is async by nature, however the C API does not function with async functions // This runtime is here to allow the C API to block_on it and return the responses in a sync manner @@ -52,6 +51,7 @@ pub struct QueryEngine { inner: RwLock, base_path: Option, logger: Logger, + url: String, } #[repr(C)] @@ -133,6 +133,15 @@ impl QueryEngine { let datamodel = get_cstr_safe(constructor_options.datamodel).expect("Datamodel must be present"); let mut schema = psl::validate(datamodel.into()); + // extract the url for later use in apply_migrations + let url = schema + .configuration + .datasources + .first() + .unwrap() + .load_url(|key| env::var(key).ok()) + .unwrap(); + let config = &mut schema.configuration; schema @@ -183,6 +192,7 @@ impl QueryEngine { inner: RwLock::new(Inner::Builder(builder)), base_path, logger, + url, }) } @@ -348,18 +358,7 @@ impl QueryEngine { let migration_folder_path = Path::new(&migration_folder_path_str); let migrations_from_filesystem = list_migration_dir(migration_folder_path)?; - let inner = self.inner.read().await; - let engine = inner.as_engine()?; - let url = engine - .schema - .configuration - .datasources - .first() - .unwrap() - .load_url(|key| env::var(key).ok()) - .unwrap(); - - let url_without_prefix = url.strip_prefix("file:").unwrap_or(&url); + let url_without_prefix = self.url.strip_prefix("file:").unwrap_or(&url); let database_path = Path::new(url_without_prefix); let migrations_from_database = list_migrations(database_path).unwrap(); From 0e97e0ec34ae84e3598dad7e7785e19290e7d644 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Tue, 26 Mar 2024 15:31:13 +0100 Subject: [PATCH 33/45] Fix non cloned url error and linting --- query-engine/query-engine-c-abi/src/engine.rs | 3 ++- query-engine/query-engine-c-abi/src/migrations.rs | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 50ed0887f587..035eefa9d563 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -358,7 +358,8 @@ impl QueryEngine { let migration_folder_path = Path::new(&migration_folder_path_str); let migrations_from_filesystem = list_migration_dir(migration_folder_path)?; - let url_without_prefix = self.url.strip_prefix("file:").unwrap_or(&url); + let url = self.url.clone(); + let url_without_prefix = url.strip_prefix("file:").unwrap_or(&url); let database_path = 
Path::new(url_without_prefix); let migrations_from_database = list_migrations(database_path).unwrap(); diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs index 2edc2bd5226b..4cd374705ba5 100644 --- a/query-engine/query-engine-c-abi/src/migrations.rs +++ b/query-engine/query-engine-c-abi/src/migrations.rs @@ -90,7 +90,7 @@ pub fn detect_failed_migrations(migrations_from_database: &[MigrationRecord]) -> .peekable(); if failed_migrations.peek().is_none() { - return Ok(()); + Ok(()) } else { Err(ApiError::Configuration( format!( From 47890da9156681a133402e7b4a5a3e27b8095ee1 Mon Sep 17 00:00:00 2001 From: Sergey Tatarintsev Date: Wed, 3 Apr 2024 15:56:36 +0200 Subject: [PATCH 34/45] Add ReactNative preview feature --- psl/psl-core/src/common/preview_features.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/psl/psl-core/src/common/preview_features.rs b/psl/psl-core/src/common/preview_features.rs index 5c55acd32851..13a077d4072f 100644 --- a/psl/psl-core/src/common/preview_features.rs +++ b/psl/psl-core/src/common/preview_features.rs @@ -78,7 +78,8 @@ features!( UncheckedScalarInputs, Views, RelationJoins, - PrismaSchemaFolder + PrismaSchemaFolder, + ReactNative ); /// Generator preview features (alphabetically sorted) @@ -129,7 +130,7 @@ pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { | TransactionApi | UncheckedScalarInputs }), - hidden: enumflags2::make_bitflags!(PreviewFeature::{PrismaSchemaFolder}), + hidden: enumflags2::make_bitflags!(PreviewFeature::{PrismaSchemaFolder | ReactNative}), }; #[derive(Debug)] From 8a94ab81d51dd81780e4e8f9611bf284f8415b1b Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 10:19:11 +0200 Subject: [PATCH 35/45] Modify make scripts and add e2e workflow for RN --- .github/workflows/react-native.yml | 66 +++++++++++++++++++ .../query-engine-c-abi/copy-android.sh | 2 +- query-engine/query-engine-c-abi/copy-ios.sh | 2 +- 3 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/react-native.yml diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml new file mode 100644 index 000000000000..4b759fce4f13 --- /dev/null +++ b/.github/workflows/react-native.yml @@ -0,0 +1,66 @@ +name: "All crates: compilation" +on: + pull_request: + paths-ignore: + - "!.github/workflows/test-compilation.yml" + - ".github/**" + - ".buildkite/**" + - "*.md" + - "LICENSE" + - "CODEOWNERS" + - "renovate.json" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test-rn-e2e: + name: "Check react native e2e" + strategy: + fail-fast: false + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + + - name: clone react-native-prisma repo + run: git clone http://github.com/prisma/react-native-prisma.git + # The repo needs to be on the same level as the prisma-engines repo so that the make scripts work + working-directory: .. 
+ + - uses: dtolnay/rust-toolchain@stable + + - name: Build iOS Prisma Query Engine + run: make sim + working-directory: query-engine/query-engine-c-abi + + - name: Enable corepack + run: corepack enable yarn + + - name: change example app to use local prisma client + working-directory: ../react-native-prisma/example + run: yarn add @prisma/client@../../packages/client + + - name: Install dependencies + working-directory: ../react-native-prisma + run: yarn + + - name: Setup Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: 3.1.2 + bundler-cache: true + + - name: Cache CocoaPods + id: cache-cocoapods + uses: actions/cache@v3 + with: + path: ../react-native-prisma/example/ios/Pods + key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} + restore-keys: | + ${{ runner.os }}-pods- + + - name: Install CocoaPods + working-directory: ../react-native-prisma/example + if: steps.cache-cocoapods.outputs.cache-hit != 'true' + run: cd ios ; pod install ; cd - diff --git a/query-engine/query-engine-c-abi/copy-android.sh b/query-engine/query-engine-c-abi/copy-android.sh index 0711b0ba5fcb..20b1e86b8ec9 100755 --- a/query-engine/query-engine-c-abi/copy-android.sh +++ b/query-engine/query-engine-c-abi/copy-android.sh @@ -15,4 +15,4 @@ cp ../../target/x86_64-linux-android/release/libquery_engine.a $TARGET_DIR/andro cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h -pingme "✅ Android compilation ready" \ No newline at end of file +# pingme "✅ Android compilation ready" \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/copy-ios.sh b/query-engine/query-engine-c-abi/copy-ios.sh index 9a3bb43baf12..58195b42bb8a 100755 --- a/query-engine/query-engine-c-abi/copy-ios.sh +++ b/query-engine/query-engine-c-abi/copy-ios.sh @@ -9,4 +9,4 @@ cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h cp -R QueryEngine.xcframework $TARGET_DIR -pingme "✅ Prisma iOS Finished" \ No newline at end of file +# pingme "✅ Prisma iOS Finished" \ No newline at end of file From e4e2f8c500c06f24e16ddf8b39e08c986f0ea253 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 12:12:42 +0200 Subject: [PATCH 36/45] Tweak CI --- .github/workflows/react-native.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index 4b759fce4f13..bb0dc4d95bc0 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -61,6 +61,6 @@ jobs: ${{ runner.os }}-pods- - name: Install CocoaPods - working-directory: ../react-native-prisma/example + working-directory: ../react-native-prisma/example/ios if: steps.cache-cocoapods.outputs.cache-hit != 'true' - run: cd ios ; pod install ; cd - + run: pod install From ef223fc7d5473a0e4b7d491a97d69fc8d26bf4d7 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 12:29:34 +0200 Subject: [PATCH 37/45] Tweak CI --- .github/workflows/react-native.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index bb0dc4d95bc0..108d8653352f 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -1,4 +1,4 @@ -name: "All crates: compilation" +name: "React Native E2E Test" on: pull_request: paths-ignore: From c584262c4bda0ccc80d6740d9df465b4293c5b0a Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 12:36:51 +0200 Subject: [PATCH 38/45] Tweak CI --- 
.github/workflows/react-native.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index 108d8653352f..a2893cffd99d 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -30,6 +30,9 @@ jobs: - uses: dtolnay/rust-toolchain@stable + - name: Install rust ios toolchain + run: rustup target add aarch64-apple-ios-sim + - name: Build iOS Prisma Query Engine run: make sim working-directory: query-engine/query-engine-c-abi From 27629a759fa923a49b3bf909b52f9e9b2111cf5e Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 12:44:04 +0200 Subject: [PATCH 39/45] Tweak CI --- .github/workflows/react-native.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index a2893cffd99d..d9196f4c3990 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -42,7 +42,7 @@ jobs: - name: change example app to use local prisma client working-directory: ../react-native-prisma/example - run: yarn add @prisma/client@../../packages/client + run: yarn add @prisma/client@5.9.0-integration-react-native.5 - name: Install dependencies working-directory: ../react-native-prisma From b5bbcd094b1c54719a4cf27fc525b0ab889d5da6 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 13:04:14 +0200 Subject: [PATCH 40/45] Tweak CI --- .github/workflows/react-native.yml | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index d9196f4c3990..37db684c6d8c 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -54,16 +54,26 @@ jobs: ruby-version: 3.1.2 bundler-cache: true - - name: Cache CocoaPods - id: cache-cocoapods - uses: actions/cache@v3 - with: - path: ../react-native-prisma/example/ios/Pods - key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} - restore-keys: | - ${{ runner.os }}-pods- + # - name: Cache CocoaPods + # id: cache-cocoapods + # uses: actions/cache@v3 + # with: + # path: ../react-native-prisma/example/ios/Pods + # key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} + # restore-keys: | + # ${{ runner.os }}-pods- - name: Install CocoaPods working-directory: ../react-native-prisma/example/ios if: steps.cache-cocoapods.outputs.cache-hit != 'true' run: pod install + + - name: start simulator in release mode + working-directory: ../react-native-prisma/example + run: yarn ios:release + + - name: Run E2E tests + run: | + make build-driver-adapters-kit + make dev-react-native + cargo test -p query-engine-tests -- --test-threads=1 From 008591d60668529d44097d1f89c982b11be332ee Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 14:31:55 +0200 Subject: [PATCH 41/45] Tweak CI --- .github/workflows/react-native.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index 37db684c6d8c..c290019fdc99 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -68,6 +68,10 @@ jobs: if: steps.cache-cocoapods.outputs.cache-hit != 'true' run: pod install + - name: Open Metro Bundler in Background + run: | + ../react-native-prisma/example/node_modules/react-native/scripts/launchPackager.command & + - name: start simulator in release 
mode working-directory: ../react-native-prisma/example run: yarn ios:release From facdb56bdf58d3afec455bac1d69fef28496e972 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 5 Apr 2024 15:01:33 +0200 Subject: [PATCH 42/45] Comment e2e test for RN --- .github/workflows/react-native.yml | 170 +++++++++++++++-------------- 1 file changed, 87 insertions(+), 83 deletions(-) diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml index c290019fdc99..2960684e3778 100644 --- a/.github/workflows/react-native.yml +++ b/.github/workflows/react-native.yml @@ -1,83 +1,87 @@ -name: "React Native E2E Test" -on: - pull_request: - paths-ignore: - - "!.github/workflows/test-compilation.yml" - - ".github/**" - - ".buildkite/**" - - "*.md" - - "LICENSE" - - "CODEOWNERS" - - "renovate.json" - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test-rn-e2e: - name: "Check react native e2e" - strategy: - fail-fast: false - runs-on: macos-latest - steps: - - uses: actions/checkout@v4 - - - name: clone react-native-prisma repo - run: git clone http://github.com/prisma/react-native-prisma.git - # The repo needs to be on the same level as the prisma-engines repo so that the make scripts work - working-directory: .. - - - uses: dtolnay/rust-toolchain@stable - - - name: Install rust ios toolchain - run: rustup target add aarch64-apple-ios-sim - - - name: Build iOS Prisma Query Engine - run: make sim - working-directory: query-engine/query-engine-c-abi - - - name: Enable corepack - run: corepack enable yarn - - - name: change example app to use local prisma client - working-directory: ../react-native-prisma/example - run: yarn add @prisma/client@5.9.0-integration-react-native.5 - - - name: Install dependencies - working-directory: ../react-native-prisma - run: yarn - - - name: Setup Ruby - uses: ruby/setup-ruby@v1 - with: - ruby-version: 3.1.2 - bundler-cache: true - - # - name: Cache CocoaPods - # id: cache-cocoapods - # uses: actions/cache@v3 - # with: - # path: ../react-native-prisma/example/ios/Pods - # key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} - # restore-keys: | - # ${{ runner.os }}-pods- - - - name: Install CocoaPods - working-directory: ../react-native-prisma/example/ios - if: steps.cache-cocoapods.outputs.cache-hit != 'true' - run: pod install - - - name: Open Metro Bundler in Background - run: | - ../react-native-prisma/example/node_modules/react-native/scripts/launchPackager.command & - - - name: start simulator in release mode - working-directory: ../react-native-prisma/example - run: yarn ios:release - - - name: Run E2E tests - run: | - make build-driver-adapters-kit - make dev-react-native - cargo test -p query-engine-tests -- --test-threads=1 +# name: "React Native E2E Test" +# on: +# pull_request: +# paths-ignore: +# - "!.github/workflows/test-compilation.yml" +# - ".github/**" +# - ".buildkite/**" +# - "*.md" +# - "LICENSE" +# - "CODEOWNERS" +# - "renovate.json" + +# concurrency: +# group: ${{ github.workflow }}-${{ github.ref }} +# cancel-in-progress: true + +# jobs: +# test-rn-e2e: +# name: "Check react native e2e" +# strategy: +# fail-fast: false +# runs-on: macos-latest +# steps: +# - uses: actions/checkout@v4 + +# - name: clone react-native-prisma repo +# run: git clone http://github.com/prisma/react-native-prisma.git +# # The repo needs to be on the same level as the prisma-engines repo so that the make scripts work +# working-directory: .. 
+ +# - uses: dtolnay/rust-toolchain@stable + +# - name: Install rust ios toolchain +# run: rustup target add aarch64-apple-ios-sim + +# - name: Build iOS Prisma Query Engine +# run: make sim +# working-directory: query-engine/query-engine-c-abi + +# - name: Enable corepack +# run: corepack enable yarn + +# - name: change example app to use local prisma client +# working-directory: ../react-native-prisma/example +# run: yarn add @prisma/client@5.9.0-integration-react-native.5 + +# - name: Install dependencies +# working-directory: ../react-native-prisma +# run: yarn + +# - name: Setup Ruby +# uses: ruby/setup-ruby@v1 +# with: +# ruby-version: 3.1.2 +# bundler-cache: true + +# # - name: Cache CocoaPods +# # id: cache-cocoapods +# # uses: actions/cache@v3 +# # with: +# # path: ../react-native-prisma/example/ios/Pods +# # key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} +# # restore-keys: | +# # ${{ runner.os }}-pods- + +# - name: Install CocoaPods +# working-directory: ../react-native-prisma/example/ios +# if: steps.cache-cocoapods.outputs.cache-hit != 'true' +# run: pod install + +# - name: Open Metro Bundler in Background +# run: | +# ../react-native-prisma/example/node_modules/react-native/scripts/launchPackager.command & + +# - name: Build iOS +# working-directory: ../react-native-prisma/example +# run: yarn build:ios + +# - name: Run iOS Simulator +# working-directory: ../react-native-prisma/example +# run: yarn ios + +# - name: Run E2E tests +# run: | +# make build-driver-adapters-kit +# make dev-react-native +# cargo test -p query-engine-tests -- --test-threads=1 From 9e61f6407fa6b499eee390267ade3bf7e2b5db13 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Fri, 12 Apr 2024 14:50:34 +0200 Subject: [PATCH 43/45] Update query-engine/query-engine-c-abi/src/engine.rs Co-authored-by: Alexey Orlenko --- query-engine/query-engine-c-abi/src/engine.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 035eefa9d563..52a0f7320e23 100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -296,12 +296,7 @@ impl QueryEngine { let query = RequestBody::try_from_str(&body, engine.engine_protocol())?; - let span = if tx_id.is_none() { - tracing::info_span!("prisma:engine", user_facing = true) - } else { - Span::none() - }; - + let span = tracing::info_span!("prisma:engine", user_facing = true); let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace); async move { From a33143ac67692b2ae005a1d92600fdbbde7d27a4 Mon Sep 17 00:00:00 2001 From: Sergey Tatarintsev Date: Fri, 12 Apr 2024 15:46:20 +0200 Subject: [PATCH 44/45] Adjust to main --- query-engine/query-engine-c-abi/Cargo.toml | 3 +++ query-engine/query-engine-c-abi/src/engine.rs | 12 ++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/query-engine/query-engine-c-abi/Cargo.toml b/query-engine/query-engine-c-abi/Cargo.toml index 6b58e43175aa..65ffd72c38cc 100644 --- a/query-engine/query-engine-c-abi/Cargo.toml +++ b/query-engine/query-engine-c-abi/Cargo.toml @@ -8,6 +8,9 @@ doc = false crate-type = ["staticlib"] name = "query_engine" +[features] +metrics = ["query-engine-common/metrics"] + [dependencies] anyhow = "1" async-trait = "0.1" diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs index 52a0f7320e23..69e8a3027cc6 
100644 --- a/query-engine/query-engine-c-abi/src/engine.rs +++ b/query-engine/query-engine-c-abi/src/engine.rs @@ -24,7 +24,7 @@ use tokio::{ runtime::{self, Runtime}, sync::RwLock, }; -use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument, Span}; +use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument}; use query_engine_common::Result; use query_engine_common::{ @@ -147,7 +147,7 @@ impl QueryEngine { schema .diagnostics .to_result() - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; let base_path = get_cstr_safe(constructor_options.base_path); match &base_path { @@ -162,11 +162,11 @@ impl QueryEngine { // constructor_options.ignore_env_var_errors, true, ) - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; config .validate_that_one_datasource_is_provided() - .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + .map_err(|errors| ApiError::conversion(errors, schema.db.source_assert_single()))?; let engine_protocol = EngineProtocol::Json; @@ -226,7 +226,7 @@ impl QueryEngine { .load_url_with_config_dir(&builder.native.config_dir, |key| { builder.native.env.get(key).map(ToString::to_string) }) - .map_err(|err| ApiError::Conversion(err, builder.schema.db.source().to_owned()))?; + .map_err(|err| ApiError::Conversion(err, builder.schema.db.source_assert_single().to_owned()))?; // This version of the query engine supports connecting via Rust bindings directly // support for JS drivers can be added, but I commented it out for now let connector_kind = ConnectorKind::Rust { @@ -266,7 +266,7 @@ impl QueryEngine { native: ConnectedEngineNative { config_dir: builder.native.config_dir.clone(), env: builder.native.env.clone(), - #[cfg(all(not(target_os = "ios"), not(target_os = "android")))] + #[cfg(feature = "metrics")] metrics: None, }, }) as Result From 639970dedb8eaf6105ecb4d1e877ee11da860e90 Mon Sep 17 00:00:00 2001 From: Sergey Tatarintsev Date: Fri, 12 Apr 2024 15:50:27 +0200 Subject: [PATCH 45/45] Remove non-working workflow --- .github/workflows/react-native.yml | 87 ------------------------------ 1 file changed, 87 deletions(-) delete mode 100644 .github/workflows/react-native.yml diff --git a/.github/workflows/react-native.yml b/.github/workflows/react-native.yml deleted file mode 100644 index 2960684e3778..000000000000 --- a/.github/workflows/react-native.yml +++ /dev/null @@ -1,87 +0,0 @@ -# name: "React Native E2E Test" -# on: -# pull_request: -# paths-ignore: -# - "!.github/workflows/test-compilation.yml" -# - ".github/**" -# - ".buildkite/**" -# - "*.md" -# - "LICENSE" -# - "CODEOWNERS" -# - "renovate.json" - -# concurrency: -# group: ${{ github.workflow }}-${{ github.ref }} -# cancel-in-progress: true - -# jobs: -# test-rn-e2e: -# name: "Check react native e2e" -# strategy: -# fail-fast: false -# runs-on: macos-latest -# steps: -# - uses: actions/checkout@v4 - -# - name: clone react-native-prisma repo -# run: git clone http://github.com/prisma/react-native-prisma.git -# # The repo needs to be on the same level as the prisma-engines repo so that the make scripts work -# working-directory: .. 
- -# - uses: dtolnay/rust-toolchain@stable - -# - name: Install rust ios toolchain -# run: rustup target add aarch64-apple-ios-sim - -# - name: Build iOS Prisma Query Engine -# run: make sim -# working-directory: query-engine/query-engine-c-abi - -# - name: Enable corepack -# run: corepack enable yarn - -# - name: change example app to use local prisma client -# working-directory: ../react-native-prisma/example -# run: yarn add @prisma/client@5.9.0-integration-react-native.5 - -# - name: Install dependencies -# working-directory: ../react-native-prisma -# run: yarn - -# - name: Setup Ruby -# uses: ruby/setup-ruby@v1 -# with: -# ruby-version: 3.1.2 -# bundler-cache: true - -# # - name: Cache CocoaPods -# # id: cache-cocoapods -# # uses: actions/cache@v3 -# # with: -# # path: ../react-native-prisma/example/ios/Pods -# # key: ${{ runner.os }}-pods-${{ hashFiles('../react-native-prisma/example/ios/Podfile.lock') }} -# # restore-keys: | -# # ${{ runner.os }}-pods- - -# - name: Install CocoaPods -# working-directory: ../react-native-prisma/example/ios -# if: steps.cache-cocoapods.outputs.cache-hit != 'true' -# run: pod install - -# - name: Open Metro Bundler in Background -# run: | -# ../react-native-prisma/example/node_modules/react-native/scripts/launchPackager.command & - -# - name: Build iOS -# working-directory: ../react-native-prisma/example -# run: yarn build:ios - -# - name: Run iOS Simulator -# working-directory: ../react-native-prisma/example -# run: yarn ios - -# - name: Run E2E tests -# run: | -# make build-driver-adapters-kit -# make dev-react-native -# cargo test -p query-engine-tests -- --test-threads=1
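Note on the removal: the e2e flow the deleted workflow scripted can still be reproduced by hand. A
condensed sketch of the same sequence, assuming a react-native-prisma checkout sits next to
prisma-engines (as the workflow's clone step required) and that the Makefile targets it invoked
(build-driver-adapters-kit, dev-react-native) are available in this repository:

    # build the C ABI engine for the iOS simulator
    (cd query-engine/query-engine-c-abi && make sim)

    # install JS dependencies and CocoaPods for the example app
    (cd ../react-native-prisma && yarn)
    (cd ../react-native-prisma/example/ios && pod install)

    # launch the example app in the simulator, then drive it with the connector test kit
    (cd ../react-native-prisma/example && yarn ios)
    make build-driver-adapters-kit
    make dev-react-native
    cargo test -p query-engine-tests -- --test-threads=1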