From 2e1051d3e62163b3a4de20caf5bfa092322cb073 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 15:10:58 +0200 Subject: [PATCH 01/12] Driver adapters: Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon (#4363) * Support BIT_ARRAY, CIDR_ARRAY, INET_ARRAY, OID_ARRAY, VARBIT_ARRAY in pg and neon * INT8_ARRAY * It's ok to reduce precision a little bit * Null lists * Allow tests to drift in the different drivers' implementation of bigdecimal * revert changes to decima.rs test * Adapt comment --- .../js/adapter-neon/src/conversion.ts | 32 ++++++++++++++--- .../js/adapter-pg/src/conversion.ts | 34 ++++++++++++++++--- .../src/conversion/postgres.rs | 7 ++-- 3 files changed, 59 insertions(+), 14 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts index 9f6486362d78..78f285240599 100644 --- a/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-neon/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType). 
+ * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) 
=> { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) + return buffers.map((buf) => buf ? encodeBuffer(buf) : null) }) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts index 69e8f1d9dec1..c26b13877927 100644 --- a/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts +++ b/query-engine/driver-adapters/js/adapter-pg/src/conversion.ts @@ -6,25 +6,34 @@ const ScalarColumnType = types.builtins /** * PostgreSQL array column types (not defined in ScalarColumnType). + * + * See the semantics of each of this code in: + * https://github.com/postgres/postgres/blob/master/src/include/catalog/pg_type.dat */ const ArrayColumnType = { + BIT_ARRAY: 1561, BOOL_ARRAY: 1000, BYTEA_ARRAY: 1001, BPCHAR_ARRAY: 1014, CHAR_ARRAY: 1002, + CIDR_ARRAY: 651, DATE_ARRAY: 1182, FLOAT4_ARRAY: 1021, FLOAT8_ARRAY: 1022, + INET_ARRAY: 1041, INT2_ARRAY: 1005, INT4_ARRAY: 1007, + INT8_ARRAY: 1016, JSONB_ARRAY: 3807, JSON_ARRAY: 199, MONEY_ARRAY: 791, NUMERIC_ARRAY: 1231, + OID_ARRAY: 1028, TEXT_ARRAY: 1009, TIMESTAMP_ARRAY: 1115, TIME_ARRAY: 1183, UUID_ARRAY: 2951, + VARBIT_ARRAY: 1563, VARCHAR_ARRAY: 1015, XML_ARRAY: 143, } @@ -90,9 +99,13 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BooleanArray case ArrayColumnType.CHAR_ARRAY: return ColumnTypeEnum.CharArray + case ArrayColumnType.BPCHAR_ARRAY: case ArrayColumnType.TEXT_ARRAY: case ArrayColumnType.VARCHAR_ARRAY: - case ArrayColumnType.BPCHAR_ARRAY: + case ArrayColumnType.VARBIT_ARRAY: + case ArrayColumnType.BIT_ARRAY: + case ArrayColumnType.INET_ARRAY: + case 
ArrayColumnType.CIDR_ARRAY: case ArrayColumnType.XML_ARRAY: return ColumnTypeEnum.TextArray case ArrayColumnType.DATE_ARRAY: @@ -108,7 +121,9 @@ export function fieldToColumnType(fieldTypeId: number): ColumnType { return ColumnTypeEnum.BytesArray case ArrayColumnType.UUID_ARRAY: return ColumnTypeEnum.UuidArray - + case ArrayColumnType.INT8_ARRAY: + case ArrayColumnType.OID_ARRAY: + return ColumnTypeEnum.Int64Array default: if (fieldTypeId >= 10000) { // Postgres Custom Types @@ -251,12 +266,21 @@ function convertBytes(serializedBytes: string): number[] { types.setTypeParser(ScalarColumnType.BYTEA, convertBytes) /* - * BYTEA_ARRAYS - arrays of arbitrary raw binary strings + * BYTEA_ARRAY - arrays of arbitrary raw binary strings */ const parseBytesArray = types.getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[] types.setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => { const buffers = parseBytesArray(serializedBytesArray) - return buffers.map(encodeBuffer) -}) \ No newline at end of file + return buffers.map((buf) => buf ? 
encodeBuffer(buf) : null) +}) + +/* BIT_ARRAY, VARBIT_ARRAY */ + +function normalizeBit(bit: string): string { + return bit +} + +types.setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit)) +types.setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit)) \ No newline at end of file diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 77e79f549d06..21b1ec6b2fb9 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -37,11 +37,8 @@ pub fn values_to_js_args(values: &[quaint::Value<'_>]) -> serde_json::Result JSArg::Buffer(bytes.to_vec()), None => JsonValue::Null.into(), }, - (quaint_value @ quaint::ValueType::Numeric(bd), _) => match bd { - Some(bd) => match bd.to_string().parse::() { - Ok(double) => JSArg::from(JsonValue::from(double)), - Err(_) => JSArg::from(JsonValue::from(quaint_value.clone())), - }, + (quaint::ValueType::Numeric(bd), _) => match bd { + Some(bd) => JSArg::RawString(bd.to_string()), None => JsonValue::Null.into(), }, (quaint::ValueType::Array(Some(items)), _) => JSArg::Array(values_to_js_args(items)?), From 98389c0f3bc634961b2866960d9cd85bb9a138ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Fern=C3=A1ndez?= Date: Mon, 23 Oct 2023 16:35:53 +0200 Subject: [PATCH 02/12] run driver adapter tests in ubunt-latest rather than buildjet (#4374) --- .github/workflows/query-engine-driver-adapters.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index 50f86575a8a7..d52b446b12fb 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -46,7 +46,7 @@ jobs: QUERY_BATCH_SIZE: '10' WORKSPACE_ROOT: ${{ github.workspace }} - runs-on: buildjet-16vcpu-ubuntu-2004 + runs-on: 
ubuntu-latest steps: - uses: actions/checkout@v4 From 28291c703da2b149e7feabdebc287319e1bf0a46 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 16:45:18 +0200 Subject: [PATCH 03/12] driver-adapters: Map planetsclae/mysql DB errors to Prisma error codes (#4364) Fix prisma/team-orm#439 --- quaint/src/connector/mysql.rs | 2 + quaint/src/connector/mysql/error.rs | 173 ++++++++++-------- quaint/src/error.rs | 1 + .../js/adapter-planetscale/src/planetscale.ts | 54 ++++-- .../js/driver-adapter-utils/src/types.ts | 6 + .../js/smoke-test-js/src/libquery/libquery.ts | 2 +- query-engine/driver-adapters/src/result.rs | 12 +- 7 files changed, 158 insertions(+), 92 deletions(-) diff --git a/quaint/src/connector/mysql.rs b/quaint/src/connector/mysql.rs index e5a1b794ab5b..4b6f27a583da 100644 --- a/quaint/src/connector/mysql.rs +++ b/quaint/src/connector/mysql.rs @@ -24,6 +24,8 @@ use std::{ use tokio::sync::Mutex; use url::{Host, Url}; +pub use error::MysqlError; + /// The underlying MySQL driver. Only available with the `expose-drivers` /// Cargo feature. 
#[cfg(feature = "expose-drivers")] diff --git a/quaint/src/connector/mysql/error.rs b/quaint/src/connector/mysql/error.rs index 8b381e1581bb..dd7c3d3bfa66 100644 --- a/quaint/src/connector/mysql/error.rs +++ b/quaint/src/connector/mysql/error.rs @@ -1,22 +1,29 @@ use crate::error::{DatabaseConstraint, Error, ErrorKind}; use mysql_async as my; -impl From for Error { - fn from(e: my::Error) -> Error { - use my::ServerError; +pub struct MysqlError { + pub code: u16, + pub message: String, + pub state: String, +} - match e { - my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { - message: err.to_string(), - }) - .build(), - my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { - Error::builder(ErrorKind::ConnectionClosed).build() - } - my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), - my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), - my::Error::Server(ServerError { ref message, code, .. }) if code == 1062 => { - let constraint = message +impl From<&my::ServerError> for MysqlError { + fn from(value: &my::ServerError) -> Self { + MysqlError { + code: value.code, + message: value.message.to_owned(), + state: value.state.to_owned(), + } + } +} + +impl From for Error { + fn from(error: MysqlError) -> Self { + let code = error.code; + match code { + 1062 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -29,12 +36,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1451 || code == 1452 => { - let constraint = message + 1451 | 1452 => { + let constraint = error + .message .split_whitespace() .nth(17) .and_then(|s| s.split('`').nth(1)) @@ -45,12 +53,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1263 => { - let constraint = message + 1263 => { + let constraint = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -62,22 +71,23 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1264 => { + 1264 => { let mut builder = Error::builder(ErrorKind::ValueOutOfRange { - message: message.clone(), + message: error.message.clone(), }); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1364 || code == 1048 => { - let constraint = message + 1364 | 1048 => { + let constraint = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -88,12 +98,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1049 => { - let db_name = message + 1049 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -103,12 +114,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1007 => { - let db_name = message + 1007 => { + let db_name = error + .message .split_whitespace() .nth(3) .and_then(|s| s.split('\'').nth(1)) @@ -118,12 +130,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1044 => { - let db_name = message + 1044 => { + let db_name = error + .message .split_whitespace() .last() .and_then(|s| s.split('\'').nth(1)) @@ -133,12 +146,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1045 => { - let user = message + 1045 => { + let user = error + .message .split_whitespace() .nth(4) .and_then(|s| s.split('@').next()) @@ -149,12 +163,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. 
}) if code == 1146 => { - let table = message + 1146 => { + let table = error + .message .split_whitespace() .nth(1) .and_then(|s| s.split('\'').nth(1)) @@ -165,12 +180,13 @@ impl From for Error { let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { ref message, code, .. }) if code == 1054 => { - let column = message + 1054 => { + let column = error + .message .split_whitespace() .nth(2) .and_then(|s| s.split('\'').nth(1)) @@ -179,68 +195,77 @@ impl From for Error { let mut builder = Error::builder(ErrorKind::ColumnNotFound { column }); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1406 => { - let column = message.split_whitespace().flat_map(|s| s.split('\'')).nth(6).into(); + 1406 => { + let column = error + .message + .split_whitespace() + .flat_map(|s| s.split('\'')) + .nth(6) + .into(); let kind = ErrorKind::LengthMismatch { column }; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1191 => { + 1191 => { let kind = ErrorKind::MissingFullTextSearchIndex; let mut builder = Error::builder(kind); builder.set_original_code(code.to_string()); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - state: _, - }) if code == 1213 => { + 1213 => { let mut builder = Error::builder(ErrorKind::TransactionWriteConflict); builder.set_original_code(format!("{code}")); - 
builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } - my::Error::Server(ServerError { - ref message, - code, - ref state, - }) => { + _ => { let kind = ErrorKind::QueryError( - my::Error::Server(ServerError { - message: message.clone(), + my::Error::Server(my::ServerError { + message: error.message.clone(), code, - state: state.clone(), + state: error.state.clone(), }) .into(), ); let mut builder = Error::builder(kind); builder.set_original_code(format!("{code}")); - builder.set_original_message(message); + builder.set_original_message(error.message); builder.build() } + } + } +} + +impl From for Error { + fn from(e: my::Error) -> Error { + match e { + my::Error::Io(my::IoError::Tls(err)) => Error::builder(ErrorKind::TlsError { + message: err.to_string(), + }) + .build(), + my::Error::Io(my::IoError::Io(err)) if err.kind() == std::io::ErrorKind::UnexpectedEof => { + Error::builder(ErrorKind::ConnectionClosed).build() + } + my::Error::Io(io_error) => Error::builder(ErrorKind::ConnectionError(io_error.into())).build(), + my::Error::Driver(e) => Error::builder(ErrorKind::QueryError(e.into())).build(), + my::Error::Server(ref server_error) => { + let mysql_error: MysqlError = server_error.into(); + mysql_error.into() + } e => Error::builder(ErrorKind::QueryError(e.into())).build(), } } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index c7c78a24772e..0460b77100fb 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -6,6 +6,7 @@ use thiserror::Error; #[cfg(feature = "pooled")] use std::time::Duration; +pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; #[derive(Debug, PartialEq, Eq)] diff --git a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts index cffb00482003..5a52851112b2 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts +++ 
b/query-engine/driver-adapters/js/adapter-planetscale/src/planetscale.ts @@ -1,5 +1,5 @@ import type planetScale from '@planetscale/database' -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, err, ok } from '@prisma/driver-adapter-utils' import type { DriverAdapter, ResultSet, @@ -36,17 +36,16 @@ class PlanetScaleQueryable field.name) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), - rows: rows as ResultSet['rows'], - lastInsertId, - } - - return ok(resultSet) + const ioResult = await this.performIO(query) + return ioResult.map(({ fields, insertId: lastInsertId, rows }) => { + const columns = fields.map((field) => field.name) + return { + columnNames: columns, + columnTypes: fields.map((field) => fieldToColumnType(field.type as PlanetScaleColumnType)), + rows: rows as ResultSet['rows'], + lastInsertId, + } + }) } /** @@ -58,8 +57,7 @@ class PlanetScaleQueryable rowsAffected) } /** @@ -67,22 +65,46 @@ class PlanetScaleQueryable> { const { sql, args: values } = query try { const result = await this.client.execute(sql, values, { as: 'array', }) - return result + return ok(result) } catch (e) { const error = e as Error + if (error.name === 'DatabaseError') { + const parsed = parseErrorMessage(error.message) + if (parsed) { + return err({ + kind: 'Mysql', + ...parsed, + }) + } + } debug('Error in performIO: %O', error) throw error } } } +function parseErrorMessage(message: string) { + const match = message.match( + /target: (?:.+?) vttablet: (?.+?) 
\(errno (?\d+)\) \(sqlstate (?.+?)\)/, + ) + + if (!match || !match.groups) { + return undefined + } + return { + code: Number(match.groups.code), + message: match.groups.message, + state: match.groups.state, + } +} + class PlanetScaleTransaction extends PlanetScaleQueryable implements Transaction { finished = false diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 42f1b0513076..104b23d233c5 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -47,6 +47,12 @@ export type Error = column: string | undefined hint: string | undefined } + | { + kind: 'Mysql' + code: number + message: string + state: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index 61d239ea42d6..e94eacbae328 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -327,7 +327,7 @@ export function smokeTestLibquery( }, }) - if (flavour === 'postgres') { + if (flavour === 'postgres' || flavour === 'mysql') { const result = await promise console.log('[nodejs] error result', JSON.stringify(result, null, 2)) assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 10bdb8a4aecb..08397d834ed0 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, PostgresError}; +use quaint::error::{Error as 
QuaintError, MysqlError, PostgresError}; use serde::Deserialize; #[derive(Deserialize)] @@ -13,6 +13,14 @@ pub struct PostgresErrorDef { hint: Option, } +#[derive(Deserialize)] +#[serde(remote = "MysqlError")] +pub struct MysqlErrorDef { + pub code: u16, + pub message: String, + pub state: String, +} + #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors @@ -24,6 +32,7 @@ pub(crate) enum DriverAdapterError { }, Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), + Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), // in the future, expected errors that map to known user errors with PXXX codes will also go here } @@ -40,6 +49,7 @@ impl From for QuaintError { match value { DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), + DriverAdapterError::Mysql(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 475c616176945d72f4330c92801f0c5e6398dc0f Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 23 Oct 2023 17:40:38 +0200 Subject: [PATCH 04/12] driver-adapters: Map libsql errors to Prisma errors (#4362) Similar approach to what we did with Neon: raw error data is returned from driver adapter in case of DB error, which then reuses Quaint's error handling code for adapter too. 
Close prisma/team-orm#393 --- quaint/src/connector/sqlite.rs | 2 + quaint/src/connector/sqlite/error.rs | 211 ++++++++---------- quaint/src/error.rs | 1 + .../js/adapter-libsql/src/libsql.ts | 48 ++-- .../js/driver-adapter-utils/src/types.ts | 8 + .../driver-adapters/js/pnpm-lock.yaml | 44 ++-- .../js/smoke-test-js/src/libquery/libquery.ts | 18 +- query-engine/driver-adapters/src/result.rs | 12 +- 8 files changed, 170 insertions(+), 174 deletions(-) diff --git a/quaint/src/connector/sqlite.rs b/quaint/src/connector/sqlite.rs index 6db49523c80a..3a1ef72b4883 100644 --- a/quaint/src/connector/sqlite.rs +++ b/quaint/src/connector/sqlite.rs @@ -1,6 +1,8 @@ mod conversion; mod error; +pub use error::SqliteError; + pub use rusqlite::{params_from_iter, version as sqlite_version}; use super::IsolationLevel; diff --git a/quaint/src/connector/sqlite/error.rs b/quaint/src/connector/sqlite/error.rs index fa8b83f3f28a..c10b335cb3c0 100644 --- a/quaint/src/connector/sqlite/error.rs +++ b/quaint/src/connector/sqlite/error.rs @@ -1,69 +1,45 @@ +use std::fmt; + use crate::error::*; use rusqlite::ffi; use rusqlite::types::FromSqlError; -impl From for Error { - fn from(e: rusqlite::Error) -> Error { - match e { - rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { - Ok(error) => *error, - Err(error) => { - let mut builder = Error::builder(ErrorKind::QueryError(error)); - - builder.set_original_message("Could not interpret parameters in an SQLite query."); - - builder.build() - } - }, - rusqlite::Error::InvalidQuery => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - - builder.set_original_message( - "Could not interpret the query or its parameters. 
Check the syntax and parameter types.", - ); - - builder.build() - } - rusqlite::Error::ExecuteReturnedResults => { - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_message("Execute returned results, which is not allowed in SQLite."); - - builder.build() - } - - rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), +#[derive(Debug)] +pub struct SqliteError { + pub extended_code: i32, + pub message: Option, +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let constraint = description - .split(": ") - .nth(1) - .map(|s| s.split(", ")) - .map(|i| i.flat_map(|s| s.split('.').last())) - .map(DatabaseConstraint::fields) - .unwrap_or(DatabaseConstraint::CannotParse); +impl fmt::Display for SqliteError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error code {}: {}", + self.extended_code, + ffi::code_to_str(self.extended_code) + ) + } +} - let kind = ErrorKind::UniqueConstraintViolation { constraint }; - let mut builder = Error::builder(kind); +impl std::error::Error for SqliteError {} - builder.set_original_code("2067"); - builder.set_original_message(description); +impl SqliteError { + pub fn new(extended_code: i32, message: Option) -> Self { + Self { extended_code, message } + } - builder.build() - } + pub fn primary_code(&self) -> i32 { + self.extended_code & 0xFF + } +} - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { +impl From for Error { + fn from(error: SqliteError) -> Self { + match error { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_UNIQUE | ffi::SQLITE_CONSTRAINT_PRIMARYKEY, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -75,19 +51,16 @@ impl From for Error { let kind = 
ErrorKind::UniqueConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1555"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1299, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_NOTNULL, + message: Some(description), + } => { let constraint = description .split(": ") .nth(1) @@ -99,64 +72,41 @@ impl From for Error { let kind = ErrorKind::NullConstraintViolation { constraint }; let mut builder = Error::builder(kind); - builder.set_original_code("1299"); - builder.set_original_message(description); - - builder.build() - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 787, - }, - Some(description), - ) => { - let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { - constraint: DatabaseConstraint::ForeignKey, - }); - - builder.set_original_code("787"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1811, - }, - Some(description), - ) => { + SqliteError { + extended_code: ffi::SQLITE_CONSTRAINT_FOREIGNKEY | ffi::SQLITE_CONSTRAINT_TRIGGER, + message: Some(description), + } => { let mut builder = Error::builder(ErrorKind::ForeignKeyConstraintViolation { constraint: DatabaseConstraint::ForeignKey, }); - builder.set_original_code("1811"); + builder.set_original_code(error.extended_code.to_string()); builder.set_original_message(description); builder.build() } - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::DatabaseBusy, - extended_code, - }, - description, - ) => { + SqliteError { extended_code, 
message } if error.primary_code() == ffi::SQLITE_BUSY => { let mut builder = Error::builder(ErrorKind::SocketTimeout); builder.set_original_code(format!("{extended_code}")); - if let Some(description) = description { + if let Some(description) = message { builder.set_original_message(description); } builder.build() } - rusqlite::Error::SqliteFailure(ffi::Error { extended_code, .. }, ref description) => match description { + SqliteError { + extended_code, + ref message, + } => match message { Some(d) if d.starts_with("no such table") => { let table = d.split(": ").last().into(); let kind = ErrorKind::TableDoesNotExist { table }; @@ -188,8 +138,8 @@ impl From for Error { builder.build() } _ => { - let description = description.as_ref().map(|d| d.to_string()); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + let description = message.as_ref().map(|d| d.to_string()); + let mut builder = Error::builder(ErrorKind::QueryError(error.into())); builder.set_original_code(format!("{extended_code}")); if let Some(description) = description { @@ -199,31 +149,50 @@ impl From for Error { builder.build() } }, + } + } +} - rusqlite::Error::SqlInputError { - error: ffi::Error { extended_code, .. }, - ref msg, - .. 
- } => match msg { - d if d.starts_with("no such column: ") => { - let column = d.split("no such column: ").last().into(); - let kind = ErrorKind::ColumnNotFound { column }; - - let mut builder = Error::builder(kind); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(d); +impl From for Error { + fn from(e: rusqlite::Error) -> Error { + match e { + rusqlite::Error::ToSqlConversionFailure(error) => match error.downcast::() { + Ok(error) => *error, + Err(error) => { + let mut builder = Error::builder(ErrorKind::QueryError(error)); - builder.build() - } - _ => { - let description = msg.clone(); - let mut builder = Error::builder(ErrorKind::QueryError(e.into())); - builder.set_original_code(extended_code.to_string()); - builder.set_original_message(description); + builder.set_original_message("Could not interpret parameters in an SQLite query."); builder.build() } }, + rusqlite::Error::InvalidQuery => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + + builder.set_original_message( + "Could not interpret the query or its parameters. Check the syntax and parameter types.", + ); + + builder.build() + } + rusqlite::Error::ExecuteReturnedResults => { + let mut builder = Error::builder(ErrorKind::QueryError(e.into())); + builder.set_original_message("Execute returned results, which is not allowed in SQLite."); + + builder.build() + } + + rusqlite::Error::QueryReturnedNoRows => Error::builder(ErrorKind::NotFound).build(), + + rusqlite::Error::SqliteFailure(ffi::Error { code: _, extended_code }, message) => { + SqliteError::new(extended_code, message).into() + } + + rusqlite::Error::SqlInputError { + error: ffi::Error { extended_code, .. }, + msg, + .. 
+ } => SqliteError::new(extended_code, Some(msg)).into(), e => Error::builder(ErrorKind::QueryError(e.into())).build(), } diff --git a/quaint/src/error.rs b/quaint/src/error.rs index 0460b77100fb..705bb6b37ee0 100644 --- a/quaint/src/error.rs +++ b/quaint/src/error.rs @@ -8,6 +8,7 @@ use std::time::Duration; pub use crate::connector::mysql::MysqlError; pub use crate::connector::postgres::PostgresError; +pub use crate::connector::sqlite::SqliteError; #[derive(Debug, PartialEq, Eq)] pub enum DatabaseConstraint { diff --git a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts index 5d104e8e2949..6528c8f44a8a 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts +++ b/query-engine/driver-adapters/js/adapter-libsql/src/libsql.ts @@ -1,4 +1,4 @@ -import { Debug, ok } from '@prisma/driver-adapter-utils' +import { Debug, ok, err } from '@prisma/driver-adapter-utils' import type { DriverAdapter, Query, @@ -8,7 +8,12 @@ import type { Transaction, TransactionOptions, } from '@prisma/driver-adapter-utils' -import type { InStatement, Client as LibSqlClientRaw, Transaction as LibSqlTransactionRaw } from '@libsql/client' +import type { + InStatement, + Client as LibSqlClientRaw, + Transaction as LibSqlTransactionRaw, + ResultSet as LibSqlResultSet, +} from '@libsql/client' import { Mutex } from 'async-mutex' import { getColumnTypes, mapRow } from './conversion' @@ -33,17 +38,17 @@ class LibSqlQueryable implements const tag = '[js::query_raw]' debug(`${tag} %O`, query) - const { columns, rows, columnTypes: declaredColumnTypes } = await this.performIO(query) - - const columnTypes = getColumnTypes(declaredColumnTypes, rows) + const ioResult = await this.performIO(query) - const resultSet: ResultSet = { - columnNames: columns, - columnTypes, - rows: rows.map((row) => mapRow(row, columnTypes)), - } + return ioResult.map(({ columns, rows, columnTypes: declaredColumnTypes }) => { + const 
columnTypes = getColumnTypes(declaredColumnTypes, rows) - return ok(resultSet) + return { + columnNames: columns, + columnTypes, + rows: rows.map((row) => mapRow(row, columnTypes)), + } + }) } /** @@ -55,8 +60,7 @@ class LibSqlQueryable implements const tag = '[js::execute_raw]' debug(`${tag} %O`, query) - const { rowsAffected } = await this.performIO(query) - return ok(rowsAffected ?? 0) + return (await this.performIO(query)).map(({ rowsAffected }) => rowsAffected ?? 0) } /** @@ -64,14 +68,22 @@ class LibSqlQueryable implements * Should the query fail due to a connection error, the connection is * marked as unhealthy. */ - private async performIO(query: Query) { + private async performIO(query: Query): Promise> { const release = await this[LOCK_TAG].acquire() try { const result = await this.client.execute(query as InStatement) - return result + return ok(result) } catch (e) { const error = e as Error debug('Error in performIO: %O', error) + const rawCode = error['rawCode'] ?? e.cause?.['rawCode'] + if (typeof rawCode === 'number') { + return err({ + kind: 'Sqlite', + extendedCode: rawCode, + message: error.message, + }) + } throw error } finally { release() @@ -82,11 +94,7 @@ class LibSqlQueryable implements class LibSqlTransaction extends LibSqlQueryable implements Transaction { finished = false - constructor( - client: TransactionClient, - readonly options: TransactionOptions, - readonly unlockParent: () => void, - ) { + constructor(client: TransactionClient, readonly options: TransactionOptions, readonly unlockParent: () => void) { super(client) } diff --git a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts index 104b23d233c5..92019f81824b 100644 --- a/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts +++ b/query-engine/driver-adapters/js/driver-adapter-utils/src/types.ts @@ -53,6 +53,14 @@ export type Error = message: string state: string } + | { + kind: 
'Sqlite' + /** + * Sqlite extended error code: https://www.sqlite.org/rescode.html + */ + extendedCode: number + message: string + } export interface Queryable { readonly flavour: 'mysql' | 'postgres' | 'sqlite' diff --git a/query-engine/driver-adapters/js/pnpm-lock.yaml b/query-engine/driver-adapters/js/pnpm-lock.yaml index 3f7f13d3ff6a..9a82ffdbac63 100644 --- a/query-engine/driver-adapters/js/pnpm-lock.yaml +++ b/query-engine/driver-adapters/js/pnpm-lock.yaml @@ -435,21 +435,21 @@ packages: dependencies: '@libsql/hrana-client': 0.5.5 js-base64: 3.7.5 - libsql: 0.1.23 + libsql: 0.1.28 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - /@libsql/darwin-arm64@0.1.23: - resolution: {integrity: sha512-+V9aoOrZ47iYbY5NrcS0F2bDOCH407QI0wxAtss0CLOcFxlz/T6Nw0ryLK31GabklJQAmOXIyqkumLfz5HT64w==} + /@libsql/darwin-arm64@0.1.28: + resolution: {integrity: sha512-p4nldHUOhcl9ibnH1F6oiXV5Dl3PAcPB9VIjdjVvO3/URo5J7mhqRMuwJMKO5DZJJGtkKJ5IO0gu0hc90rnKIg==} cpu: [arm64] os: [darwin] requiresBuild: true optional: true - /@libsql/darwin-x64@0.1.23: - resolution: {integrity: sha512-toHo7s0HiMl4VCIfjhGXDe9bGWWo78eP8fxIbwU6RlaLO6MNV9fjHY/GjTWccWOwyxcT+q6X/kUc957HnoW3bg==} + /@libsql/darwin-x64@0.1.28: + resolution: {integrity: sha512-WaEK+Z+wP5sr0h8EcusSGHv4Mqc3smYICeG4P/wsbRDKQ2WUMWqZrpgqaBsm+WPbXogU2vpf+qGc8BnpFZ0ggw==} cpu: [x64] os: [darwin] requiresBuild: true @@ -484,22 +484,29 @@ packages: - bufferutil - utf-8-validate - /@libsql/linux-x64-gnu@0.1.23: - resolution: {integrity: sha512-U11LdjayakOj0lQCHDYkTgUfe4Q+7AjZZh8MzgEDF/9l0bmKNI3eFLWA3JD2Xm98yz65lUx95om0WKOKu5VW/w==} + /@libsql/linux-arm64-gnu@0.1.28: + resolution: {integrity: sha512-a17ANBuOqH2L8gdyET4Kg3XggQvxWnoA+7x7sDEX5NyWNyvr7P04WzNPAT0xAOWLclC1fDD6jM5sh/fbJk/7NA==} + cpu: [arm64] + os: [linux] + requiresBuild: true + optional: true + + /@libsql/linux-x64-gnu@0.1.28: + resolution: {integrity: sha512-dkg+Ou7ApV0PHpZWd9c6NrYyc/WSNn5h/ScKotaMTLWlLL96XAMNwrYLpZpUj61I2y7QzU98XtMfiSD1Ux+VaA==} cpu: [x64] 
os: [linux] requiresBuild: true optional: true - /@libsql/linux-x64-musl@0.1.23: - resolution: {integrity: sha512-8UcCK2sPVzcafHsEmcU5IDp/NxjD6F6JFS5giijsMX5iGgxYQiiwTUMOmSxW0AWBeT4VY5U7G6rG5PC8JSFtfg==} + /@libsql/linux-x64-musl@0.1.28: + resolution: {integrity: sha512-ZuOxCDYlG+f1IDsxstmaxLtgG9HvlLuUKs0X3um4f5F5V+P+PF8qr08gSdD1IP2pj+JBOiwhQffaEpR1wupxhQ==} cpu: [x64] os: [linux] requiresBuild: true optional: true - /@libsql/win32-x64-msvc@0.1.23: - resolution: {integrity: sha512-HAugD66jTmRRRGNMLKRiaFeMOC3mgUsAiuO6NRdRz3nM6saf9e5QqN/Ppuu9yqHHcZfv7VhQ9UGlAvzVK64Itg==} + /@libsql/win32-x64-msvc@0.1.28: + resolution: {integrity: sha512-2cmUiMIsJLHpetebGeeYqUYaCPWEnwMjqxwu1ZEEbA5x8r+DNmIhLrc0QSQ29p7a5u14vbZnShNOtT/XG7vKew==} cpu: [x64] os: [win32] requiresBuild: true @@ -971,19 +978,20 @@ packages: /js-base64@3.7.5: resolution: {integrity: sha512-3MEt5DTINKqfScXKfJFrRbxkrnk2AxPWGBL/ycjz4dK8iqiSJ06UxD8jh8xuh6p10TX4t2+7FsBYVxxQbMg+qA==} - /libsql@0.1.23: - resolution: {integrity: sha512-Nf/1B2Glxvcnba4jYFhXcaYmicyBA3RRm0LVwBkTl8UWCIDbX+Ad7c1ecrQwixPLPffWOVxKIqyCNTuUHUkVgA==} + /libsql@0.1.28: + resolution: {integrity: sha512-yCKlT0ntV8ZIWTPGNClhQQeH/LNAzLjbbEgBvgLb+jfQwAuTbyvPpVVLwkZzesqja1nbkWApztW0pX81Jp0pkw==} cpu: [x64, arm64] os: [darwin, linux, win32] dependencies: '@neon-rs/load': 0.0.4 detect-libc: 2.0.2 optionalDependencies: - '@libsql/darwin-arm64': 0.1.23 - '@libsql/darwin-x64': 0.1.23 - '@libsql/linux-x64-gnu': 0.1.23 - '@libsql/linux-x64-musl': 0.1.23 - '@libsql/win32-x64-msvc': 0.1.23 + '@libsql/darwin-arm64': 0.1.28 + '@libsql/darwin-x64': 0.1.28 + '@libsql/linux-arm64-gnu': 0.1.28 + '@libsql/linux-x64-gnu': 0.1.28 + '@libsql/linux-x64-musl': 0.1.28 + '@libsql/win32-x64-msvc': 0.1.28 /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} diff --git a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts 
b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts index e94eacbae328..c50ad3e257ab 100644 --- a/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts +++ b/query-engine/driver-adapters/js/smoke-test-js/src/libquery/libquery.ts @@ -290,13 +290,13 @@ export function smokeTestLibquery( }) it('expected error (on duplicate insert) as json result (not throwing error)', async () => { - // clean up first await doQuery({ modelName: 'Unique', action: 'deleteMany', query: { + arguments: {}, selection: { - count: true, + $scalars: true, }, }, }) @@ -327,17 +327,9 @@ export function smokeTestLibquery( }, }) - if (flavour === 'postgres' || flavour === 'mysql') { - const result = await promise - console.log('[nodejs] error result', JSON.stringify(result, null, 2)) - assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') - } else { - await assert.rejects(promise, (err) => { - assert(typeof err === 'object' && err !== null) - assert.match(err['message'], /unique/i) - return true - }) - } + const result = await promise + console.log('[nodejs] error result', JSON.stringify(result, null, 2)) + assert.equal(result?.errors?.[0]?.['user_facing_error']?.['error_code'], 'P2002') }) describe('read scalar and non scalar types', () => { diff --git a/query-engine/driver-adapters/src/result.rs b/query-engine/driver-adapters/src/result.rs index 08397d834ed0..c43f66a81e72 100644 --- a/query-engine/driver-adapters/src/result.rs +++ b/query-engine/driver-adapters/src/result.rs @@ -1,5 +1,5 @@ use napi::{bindgen_prelude::FromNapiValue, Env, JsUnknown, NapiValue}; -use quaint::error::{Error as QuaintError, MysqlError, PostgresError}; +use quaint::error::{Error as QuaintError, MysqlError, PostgresError, SqliteError}; use serde::Deserialize; #[derive(Deserialize)] @@ -21,6 +21,13 @@ pub struct MysqlErrorDef { pub state: String, } +#[derive(Deserialize)] +#[serde(remote = "SqliteError", rename_all = "camelCase")] +pub struct 
SqliteErrorDef { + pub extended_code: i32, + pub message: Option, +} + #[derive(Deserialize)] #[serde(tag = "kind")] /// Wrapper for JS-side errors @@ -33,7 +40,7 @@ pub(crate) enum DriverAdapterError { Postgres(#[serde(with = "PostgresErrorDef")] PostgresError), Mysql(#[serde(with = "MysqlErrorDef")] MysqlError), - // in the future, expected errors that map to known user errors with PXXX codes will also go here + Sqlite(#[serde(with = "SqliteErrorDef")] SqliteError), } impl FromNapiValue for DriverAdapterError { @@ -50,6 +57,7 @@ impl From for QuaintError { DriverAdapterError::GenericJs { id } => QuaintError::external_error(id), DriverAdapterError::Postgres(e) => e.into(), DriverAdapterError::Mysql(e) => e.into(), + DriverAdapterError::Sqlite(e) => e.into(), // in future, more error types would be added and we'll need to convert them to proper QuaintErrors here } } From 39b6c54adcb81cacdaca4648194c925105406ae9 Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Mon, 23 Oct 2023 19:31:01 +0200 Subject: [PATCH 05/12] chore(docs): Add "how to" README sections to public-facing Driver Adapters (#4377) * chore(driver-adapters): add README for PlanetScale * chore(driver-adapters): add README for Neon * chore(driver-adapters): add README for LibSQL * chore: fix sentence removing env var reference --- .../js/adapter-libsql/README.md | 90 +++++++++++++++++++ .../driver-adapters/js/adapter-neon/README.md | 68 +++++++++++++- .../js/adapter-planetscale/README.md | 67 +++++++++++++- 3 files changed, 223 insertions(+), 2 deletions(-) diff --git a/query-engine/driver-adapters/js/adapter-libsql/README.md b/query-engine/driver-adapters/js/adapter-libsql/README.md index 219200af2080..5ca415ea8ec9 100644 --- a/query-engine/driver-adapters/js/adapter-libsql/README.md +++ b/query-engine/driver-adapters/js/adapter-libsql/README.md @@ -3,3 +3,93 @@ Prisma driver adapter for Turso and libSQL. See https://prisma.io/turso for details. 
+ +The following usage tutorial is valid for Prisma 5.4.2 and later versions. + +## How to install + +After [getting started with Turso](https://www.prisma.io/blog/prisma-turso-ea-support-rXGd_Tmy3UXX#create-a-database-on-turso), you can use the Turso serverless driver to connect to your database. You will need to install the `@prisma/adapter-libsql` driver adapter and the `@libsql/client` serverless driver. + +```sh +npm install @prisma/adapter-libsql +npm install @libsql/client +``` + +Make sure your Turso database connection string and authentication token is copied over to your `.env` file. The connection string will start with `libsql://`. + +```env +# .env +TURSO_AUTH_TOKEN="eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9..." +TURSO_DATABASE_URL="libsql://turso-prisma-random-user.turso.io" +``` + +You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature. + +```prisma +// schema.prisma +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "sqlite" + url = "file:./dev.db" +} +``` + +Now run `npx prisma generate` to re-generate Prisma Client. + +## How to setup migrations + +As Turso needs to sync between a local sqlite database and another one hosted on Turso Cloud, an additional migration setup is needed. In particular, anytime you modify models and relations in your `schema.prisma` file, you should: + +1. Create a baseline migration + +```sh +npx prisma migrate diff --from-empty \ + --to-schema-datamodel prisma/schema.prisma \ + --script > baseline.sql +``` + +2. Apply the migration to your Turso database + +```sh +turso db shell turso-prisma < baseline.sql +``` + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the libSQL serverless database driver +3. Instantiate the Prisma libSQL adapter with the libSQL serverless database driver +4. 
Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { PrismaClient } from '@prisma/client'; +import { PrismaLibSQL } from '@prisma/adapter-libsql'; +import { createClient } from '@libsql/client'; + +// Setup +const connectionString = `${process.env.TURSO_DATABASE_URL}`; +const authToken = `${process.env.TURSO_AUTH_TOKEN}`; + +// Init prisma client +const libsql = createClient({ + url: connectionString, + authToken, +}); +const adapter = new PrismaLibSQL(libsql); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Your Prisma Client instance now uses a **single** remote Turso database. +You can take it a step further by setting up database replicas. Turso automatically picks the closest replica to your app for read queries when you create replicas. No additional logic is required to define how the routing of the read queries should be handled. Write queries will be forwarded to the primary database. +We encourage you to create an issue if you find something missing or run into a bug. + +If you have any feedback about our libSQL Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21345) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-neon/README.md b/query-engine/driver-adapters/js/adapter-neon/README.md index 8af259ab74c1..f36f44c6bca4 100644 --- a/query-engine/driver-adapters/js/adapter-neon/README.md +++ b/query-engine/driver-adapters/js/adapter-neon/README.md @@ -2,4 +2,70 @@ Prisma driver adapter for [Neon Serverless Driver](https://github.com/neondatabase/serverless). -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. +See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. 
+ +The following usage tutorial is valid for Prisma 5.4.2 and later versions. + +## How to install + +After [creating your database on Neon](https://neon.tech/docs/get-started-with-neon/setting-up-a-project), you'll need to install the `@prisma/adapter-neon` driver adapter, Neon’s serverless database driver `@neondatabase/serverless`, and `ws` to set up a WebSocket connection for use by Neon. + +```sh +npm install @prisma/adapter-neon +npm install @neondatabase/serverless +npm install ws +``` + +Make sure your [Neon database connection string](https://neon.tech/docs/connect/connect-from-any-app) is copied over to your `.env` file. The connection string will start with `postgres://`. + +```env +# .env +DATABASE_URL="postgres://..." +``` + +Make sure you also include the `driverAdapters` Preview feature in your `schema.prisma`. + +```prisma +// schema.prisma +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} +``` + +Now run `npx prisma generate` to re-generate Prisma Client. + +## How to use + +In TypeScript, you will need to: + +1. Import packages +2. Set up the Neon serverless database driver +3. Instantiate the Prisma Neon adapter with the Neon serverless database driver +4. 
Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { Pool, neonConfig } from '@neondatabase/serverless'; +import { PrismaNeon } from '@prisma/adapter-neon'; +import { PrismaClient } from '@prisma/client'; +import ws from 'ws'; + +// Setup +neonConfig.webSocketConstructor = ws; +const connectionString = `${process.env.DATABASE_URL}`; + +// Init prisma client +const pool = new Pool({ connectionString }); +const adapter = new PrismaNeon(pool); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Now your code has built-in benefits of the Neon serverless driver, such as WebSocket connections and [message pipelining](https://neon.tech/blog/quicker-serverless-postgres), while Prisma covers connection creation and destruction, error handling, and type safety. If you have any feedback about our Neon Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21346) and we'll use it as we continue development. diff --git a/query-engine/driver-adapters/js/adapter-planetscale/README.md b/query-engine/driver-adapters/js/adapter-planetscale/README.md index 8e145c07c098..a4cdc132036a 100644 --- a/query-engine/driver-adapters/js/adapter-planetscale/README.md +++ b/query-engine/driver-adapters/js/adapter-planetscale/README.md @@ -2,5 +2,70 @@ Prisma driver adapter for [PlanetScale Serverless Driver](https://github.com/planetscale/database-js). -See https://github.com/prisma/prisma/releases/tag/5.4.0 for details. +See https://github.com/prisma/prisma/releases/tag/5.4.0 and https://www.prisma.io/blog/serverless-database-drivers-KML1ehXORxZV for details. +The following usage tutorial is valid for Prisma 5.4.2 and later versions. 
+
+## How to install
+
+After [getting started with PlanetScale](https://planetscale.com/docs/tutorials/planetscale-quick-start-guide), you can use the PlanetScale serverless driver to connect to your database. You will need to install the `@prisma/adapter-planetscale` driver adapter, the `@planetscale/database` serverless driver, and `undici` to provide a `fetch` function to the PlanetScale driver.
+
+```sh
+npm install @prisma/adapter-planetscale
+npm install @planetscale/database
+npm install undici
+```
+
+Make sure your [PlanetScale database connection string](https://planetscale.com/docs/concepts/connection-strings) is copied over to your `.env` file. The connection string will start with `mysql://`.
+
+```env
+# .env
+DATABASE_URL="mysql://..."
+```
+
+You can now reference this environment variable in your `schema.prisma` datasource. Make sure you also include the `driverAdapters` Preview feature.
+
+```prisma
+// schema.prisma
+generator client {
+  provider        = "prisma-client-js"
+  previewFeatures = ["driverAdapters"]
+}
+
+datasource db {
+  provider     = "mysql"
+  url          = env("DATABASE_URL")
+  relationMode = "prisma"
+}
+```
+
+Now run `npx prisma generate` to re-generate Prisma Client.
+
+## How to use
+
+In TypeScript, you will need to:
+
+1. Import packages
+2. Set up the PlanetScale serverless database driver
+3. Instantiate the Prisma PlanetScale adapter with the PlanetScale serverless database driver
+4. 
Pass the driver adapter to the Prisma Client instance + +```typescript +// Import needed packages +import { connect } from '@planetscale/database'; +import { PrismaPlanetScale } from '@prisma/adapter-planetscale'; +import { PrismaClient } from '@prisma/client'; +import { fetch as undiciFetch } from 'undici'; + +// Setup +const connectionString = `${process.env.DATABASE_URL}`; + +// Init prisma client +const connection = connect({ url: connectionString, fetch: undiciFetch }); +const adapter = new PrismaPlanetScale(connection); +const prisma = new PrismaClient({ adapter }); + +// Use Prisma Client as normal +``` + +Your Prisma Client instance now uses PlanetScale's [`database-js`](https://github.com/planetscale/database-js), which can improve [`connection reliability and performance`](https://planetscale.com/blog/faster-mysql-with-http3). It uses HTTP requests instead of Prisma’s connection pool, but Prisma will continue to handle error handling and type safety. If you have any feedback about our PlanetScale Serverless Driver support, please leave a comment on our [dedicated GitHub issue](https://github.com/prisma/prisma/discussions/21347) and we'll use it as we continue development. 
From 2450f885b75e29f5a6d7cde46d6a3e05290e5b33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Tue, 24 Oct 2023 16:11:10 +0200 Subject: [PATCH 06/12] chore(docker): switch restart to unless-stopped + auto-formatting (#4369) --- .../workflows/publish-prisma-schema-wasm.yml | 10 +- docker-compose.yml | 206 +++++++++--------- quaint/docker-compose.yml | 34 +-- 3 files changed, 127 insertions(+), 123 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index e166c05e5841..f453811009ce 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -12,7 +12,7 @@ on: required: true npmDistTag: required: true - default: "latest" + default: 'latest' jobs: build: @@ -21,7 +21,7 @@ jobs: steps: - name: Print input env: - THE_INPUT: "${{ toJson(github.event.inputs) }}" + THE_INPUT: '${{ toJson(github.event.inputs) }}' run: | echo $THE_INPUT @@ -42,7 +42,7 @@ jobs: - uses: actions/setup-node@v3 with: - node-version: "14.x" + node-version: '14.x' - name: Set up NPM token run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc @@ -65,6 +65,6 @@ jobs: if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1 env: - SLACK_TITLE: "prisma-schema-wasm publishing failed :x:" - SLACK_COLOR: "#FF0000" + SLACK_TITLE: 'prisma-schema-wasm publishing failed :x:' + SLACK_COLOR: '#FF0000' SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WASM_FAILING }} diff --git a/docker-compose.yml b/docker-compose.yml index fad49d836cde..fc585adabafe 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,113 +1,115 @@ -version: "3" +version: '3' services: cockroach_23_1: image: prismagraphql/cockroachdb-custom:23.1 + restart: unless-stopped command: | start-single-node --insecure ports: - - "26260:26257" + - '26260:26257' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: 
unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases cockroach_22_1_0: image: prismagraphql/cockroachdb-custom:22.1.0 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26257:26257" + - '26257:26257' networks: - databases cockroach_21_2_0_patched: image: prismagraphql/cockroachdb-custom:21.2.0-patched - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26258:26257" + - '26258:26257' networks: - databases pgbouncer: image: brainsam/pgbouncer:latest - restart: always + restart: unless-stopped environment: - DB_HOST: "postgres11" - DB_PORT: "5432" - DB_USER: "postgres" - DB_PASSWORD: "prisma" - POOL_MODE: "transaction" - MAX_CLIENT_CONN: "1000" + DB_HOST: 'postgres11' + DB_PORT: '5432' + DB_USER: 'postgres' + DB_PASSWORD: 'prisma' + POOL_MODE: 'transaction' + MAX_CLIENT_CONN: '1000' networks: - databases ports: - - "6432:6432" + - '6432:6432' postgres9: image: postgres:9.6 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5431:5432" + - '5431:5432' networks: - databases postgres10: image: postgres:10 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5432:5432" + - '5432:5432' networks: - databases postgres11: image: postgres:11 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5433:5432" + - '5433:5432' networks: - databases postgres12: image: postgres:12 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5434:5432" + - '5434:5432' 
networks: - databases postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" + POSTGRES_PASSWORD: 'prisma' ports: - - "5435:5432" + - '5435:5432' networks: - databases neon-postgres13: image: ghcr.io/neondatabase/wsproxy:latest + restart: unless-stopped environment: # the port of the postgres13 within the databases network APPEND_PORT: 'postgres13:5432' @@ -131,50 +133,50 @@ services: - '8085:8085' depends_on: - vitess-test-8_0 - restart: always + restart: unless-stopped healthcheck: - test: [ 'CMD', 'nc', '-z', '127.0.0.1', '8085' ] + test: ['CMD', 'nc', '-z', '127.0.0.1', '8085'] interval: 5s timeout: 2s retries: 20 postgres14: image: postgres:14 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5437:5432" + - '5437:5432' networks: - databases postgres15: image: postgres:15 - restart: always + restart: unless-stopped command: postgres -c 'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - POSTGRES_HOST_AUTH_METHOD: "md5" - POSTGRES_INITDB_ARGS: "--auth-host=md5" + POSTGRES_PASSWORD: 'prisma' + POSTGRES_HOST_AUTH_METHOD: 'md5' + POSTGRES_INITDB_ARGS: '--auth-host=md5' ports: - - "5438:5432" + - '5438:5432' networks: - databases mysql-5-6: image: mysql:5.6.50 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3309:3306" + - '3309:3306' networks: - databases tmpfs: /var/lib/mysql @@ -182,14 +184,14 @@ services: mysql-5-7: image: mysql:5.7.32 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: 
root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql @@ -197,33 +199,33 @@ services: mysql-8-0: image: mysql:8.0.28 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb-10-0: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb vitess-test-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33577:33577 environment: @@ -243,7 +245,7 @@ services: vitess-test-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33807:33807 environment: @@ -263,7 +265,7 @@ services: vitess-shadow-5_7: image: vitess/vttestserver:mysql57@sha256:23863a518b34330109c502ac61a396008f5f023e96263bcb2bb1b0f7f7d5dc7f - restart: always + restart: unless-stopped ports: - 33578:33577 environment: @@ -283,7 +285,7 @@ services: vitess-shadow-8_0: image: vitess/vttestserver:mysql80@sha256:8bec2644d83cb322eb2cdd596d33c0f858243ba6ade9164c95dfcc519643094e - restart: always + restart: unless-stopped ports: - 33808:33807 environment: @@ -303,139 +305,140 @@ services: mssql-2017: image: mcr.microsoft.com/mssql/server:2017-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1434:1433" + - '1434:1433' networks: - databases - + mssql-2019: image: mcr.microsoft.com/mssql/server:2019-latest - restart: always + restart: unless-stopped environment: - 
ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mssql-2022: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1435:1433" + - '1435:1433' networks: - databases azure-edge: image: mcr.microsoft.com/azure-sql-edge - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - MSSQL_SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + MSSQL_SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases mongo42: image: prismagraphql/mongo-single-replica:4.2.17-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27016 INIT_WAIT_SEC: $INIT_WAIT_SEC networks: - databases ports: - - "27016:27016" + - '27016:27016' mongo44: image: prismagraphql/mongo-single-replica:4.4.3-bionic - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo42-single: image: mongo:4.2 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27016:27017" + - '27016:27017' networks: - databases mongo44-single: image: mongo:4.4 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' 
INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27017:27017" + - '27017:27017' networks: - databases mongo5: image: prismagraphql/mongo-single-replica:5.0.3 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' MONGO_PORT: 27018 INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27018" + - '27018:27018' networks: - databases mongo5-single: image: mongo:5 - restart: always + restart: unless-stopped environment: - MONGO_INITDB_ROOT_USERNAME: "prisma" - MONGO_INITDB_ROOT_PASSWORD: "prisma" + MONGO_INITDB_ROOT_USERNAME: 'prisma' + MONGO_INITDB_ROOT_PASSWORD: 'prisma' INIT_WAIT_SEC: $INIT_WAIT_SEC ports: - - "27018:27017" + - '27018:27017' networks: - databases mongo-express: image: mongo-express - restart: always + restart: unless-stopped ports: - 8081:8081 environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: "prisma" - ME_CONFIG_MONGODB_ADMINPASSWORD: "prisma" + ME_CONFIG_MONGODB_ADMINUSERNAME: 'prisma' + ME_CONFIG_MONGODB_ADMINPASSWORD: 'prisma' ME_CONFIG_MONGODB_URL: mongodb://prisma:prisma@mongo4-single:27017/ networks: - databases otel: image: jaegertracing/all-in-one:1.35 + restart: unless-stopped environment: - COLLECTOR_OTLP_ENABLED: "true" - COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + COLLECTOR_OTLP_ENABLED: 'true' + COLLECTOR_ZIPKIN_HOST_PORT: ':9411' ports: - 6831:6831/udp - 6832:6832/udp @@ -450,6 +453,7 @@ services: prometheus: image: prom/prometheus + restart: unless-stopped volumes: - ${PWD}/metrics/prometheus:/prometheus-data command: --config.file=/prometheus-data/prometheus.yml diff --git a/quaint/docker-compose.yml b/quaint/docker-compose.yml index ec3c06faa289..47f1a3456a6e 100644 --- a/quaint/docker-compose.yml +++ b/quaint/docker-compose.yml @@ -1,14 +1,14 @@ -version: "3" +version: '3' services: postgres13: image: postgres:13 - restart: always + restart: unless-stopped command: postgres -c 
'max_connections=1000' environment: - POSTGRES_PASSWORD: "prisma" - PGDATA: "/pgtmpfs13" + POSTGRES_PASSWORD: 'prisma' + PGDATA: '/pgtmpfs13' ports: - - "5432:5432" + - '5432:5432' networks: - databases tmpfs: /pgtmpfs12 @@ -16,13 +16,13 @@ services: mysql57: image: mysql:5.7 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3306:3306" + - '3306:3306' networks: - databases tmpfs: /var/lib/mysql5.7 @@ -30,48 +30,48 @@ services: mysql8: image: mysql:8.0.22 command: mysqld - restart: always + restart: unless-stopped platform: linux/x86_64 environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3307:3306" + - '3307:3306' networks: - databases tmpfs: /var/lib/mysql8 mariadb: image: mariadb:10 - restart: always + restart: unless-stopped environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma MYSQL_DATABASE: prisma ports: - - "3308:3306" + - '3308:3306' networks: - databases tmpfs: /var/lib/mariadb mssql: image: mcr.microsoft.com/mssql/server:2022-latest - restart: always + restart: unless-stopped environment: - ACCEPT_EULA: "Y" - SA_PASSWORD: "" + ACCEPT_EULA: 'Y' + SA_PASSWORD: '' ports: - - "1433:1433" + - '1433:1433' networks: - databases cockroach_22_2: image: prismagraphql/cockroachdb-custom:22.2 - restart: always + restart: unless-stopped command: start-single-node --insecure ports: - - "26259:26257" + - '26259:26257' networks: - databases From de2449110135e91857b477c346b7f74d52d61613 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 24 Oct 2023 16:29:28 +0200 Subject: [PATCH 07/12] chore(deps): update dependency node to v20.8.1 (#4204) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/js/.nvmrc | 2 +- query-engine/query-engine-wasm/.nvmrc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff 
--git a/query-engine/driver-adapters/js/.nvmrc b/query-engine/driver-adapters/js/.nvmrc index 8c60e1e54f37..6569dfa4f323 100644 --- a/query-engine/driver-adapters/js/.nvmrc +++ b/query-engine/driver-adapters/js/.nvmrc @@ -1 +1 @@ -v20.5.1 +20.8.1 diff --git a/query-engine/query-engine-wasm/.nvmrc b/query-engine/query-engine-wasm/.nvmrc index 8c60e1e54f37..6569dfa4f323 100644 --- a/query-engine/query-engine-wasm/.nvmrc +++ b/query-engine/query-engine-wasm/.nvmrc @@ -1 +1 @@ -v20.5.1 +20.8.1 From f365956fa36e50f1c89d8ffe3997d512ab2d6fec Mon Sep 17 00:00:00 2001 From: Robert Craigie Date: Wed, 25 Oct 2023 15:55:21 +0100 Subject: [PATCH 08/12] fix(qe): correct /status route response body (#4246) --- query-engine/query-engine/src/server/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index 75543dc7ee58..f3583df310d7 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -63,7 +63,7 @@ pub(crate) async fn routes(cx: Arc, req: Request) -> Result let mut res = match (req.method(), req.uri().path()) { (&Method::POST, "/") => request_handler(cx, req).await?, (&Method::GET, "/") if cx.enabled_features.contains(Feature::Playground) => playground_handler(), - (&Method::GET, "/status") => build_json_response(StatusCode::OK, r#"{"status":"ok"}"#), + (&Method::GET, "/status") => build_json_response(StatusCode::OK, &json!({"status": "ok"})), (&Method::GET, "/sdl") => { let schema = render_graphql_schema(cx.query_schema()); From 46fa0396e2de9ab6ec99c48bd342bc513b032648 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:44:56 +0200 Subject: [PATCH 09/12] ci: do not skip the buildkite pipeline when previous commit is empty (#4385) --- .buildkite/engineer | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/.buildkite/engineer 
b/.buildkite/engineer index bf31a6e371df..98b78284eaf2 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -9,24 +9,35 @@ else echo "We are in the $2 pipeline." fi +# Checks what's the diff with the previous commit +# This is used to detect if the previous commit was empty +GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .) + # Checks what's the diff with the previous commit, # excluding some paths that do not need a run, # because they do not affect tests running in Buildkite. -GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) # $2 is either "test" or "build", depending on the pipeline # Example: ./.buildkite/engineer pipeline test # We only want to check for changes and skip in the test pipeline. if [[ "$2" == "test" ]]; then - # Checking if GIT_DIFF is empty - # If it's empty then it's most likely that there are changes but they are in ignored paths. - # So we do not start Buildkite + # If GIT_DIFF is empty then the previous commit was empty + # We assume it's intended and we continue with the run + # Example use: to get a new engine hash built with identical code if [ -z "${GIT_DIFF}" ]; then - echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." - exit 0 + echo "The previous commit is empty, this run will continue..." else - # Note that printf works better for displaying line returns in CI - printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF}" + # Checking if GIT_DIFF_WITH_IGNORED_PATHS is empty + # If it's empty then it's most likely that there are changes but they are in ignored paths. 
+ # So we do not start Buildkite + if [ -z "${GIT_DIFF_WITH_IGNORED_PATHS}" ]; then + echo "No changes found for the previous commit in paths that are not ignored, this run will now be skipped." + exit 0 + else + # Note that printf works better for displaying line returns in CI + printf "Changes found for the previous commit in paths that are not ignored: \n\n%s\n\nThis run will continue...\n" "${GIT_DIFF_WITH_IGNORED_PATHS}" + fi fi fi From 51d8349124b96b4c636526990eba13a691d553a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:45:25 +0200 Subject: [PATCH 10/12] chore: login to Docker only if Docker credentials are truthy (#4381) --- .github/workflows/query-engine-black-box.yml | 1 + .github/workflows/query-engine-driver-adapters.yml | 1 + .github/workflows/query-engine.yml | 1 + .github/workflows/schema-engine.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/.github/workflows/query-engine-black-box.yml b/.github/workflows/query-engine-black-box.yml index 78e60178d7f7..a941588dfd8e 100644 --- a/.github/workflows/query-engine-black-box.yml +++ b/.github/workflows/query-engine-black-box.yml @@ -50,6 +50,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/query-engine-driver-adapters.yml b/.github/workflows/query-engine-driver-adapters.yml index d52b446b12fb..5b34b9761c4c 100644 --- a/.github/workflows/query-engine-driver-adapters.yml +++ b/.github/workflows/query-engine-driver-adapters.yml @@ -68,6 +68,7 @@ jobs: - name: 'Login to Docker Hub' uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git 
a/.github/workflows/query-engine.yml b/.github/workflows/query-engine.yml index 9c242217662d..3df596e20d61 100644 --- a/.github/workflows/query-engine.yml +++ b/.github/workflows/query-engine.yml @@ -80,6 +80,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/schema-engine.yml b/.github/workflows/schema-engine.yml index 5bdf25a2bd35..c6249f069091 100644 --- a/.github/workflows/schema-engine.yml +++ b/.github/workflows/schema-engine.yml @@ -113,6 +113,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 continue-on-error: true + if: "${{ secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}" with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} From 473ee41d8162d802413a60f9b23238b8e5648fd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 13:48:54 +0200 Subject: [PATCH 11/12] ci(biuildkite): skip test&build for changes in query-engine/query-engine-wasm (#4371) --- .buildkite/engineer | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/engineer b/.buildkite/engineer index 98b78284eaf2..5de99cea5390 100755 --- a/.buildkite/engineer +++ b/.buildkite/engineer @@ -16,7 +16,7 @@ GIT_DIFF=$(git diff --name-only HEAD HEAD~1 -- .) # Checks what's the diff with the previous commit, # excluding some paths that do not need a run, # because they do not affect tests running in Buildkite. -GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . ':!.github' ':!query-engine/driver-adapters/js' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) +GIT_DIFF_WITH_IGNORED_PATHS=$(git diff --name-only HEAD HEAD~1 -- . 
':!.github' ':!query-engine/driver-adapters/js' ':!query-engine/query-engine-wasm' ':!renovate.json' ':!*.md' ':!LICENSE' ':!CODEOWNERS';) # $2 is either "test" or "build", depending on the pipeline # Example: ./.buildkite/engineer pipeline test From 9c1efedeb581438e6d20860d939957bc093154a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 26 Oct 2023 14:35:07 +0200 Subject: [PATCH 12/12] ci(schema-wasm): cleanup the GitHub Action (#4370) Co-authored-by: Jan Piotrowski --- .../workflows/publish-prisma-schema-wasm.yml | 27 +++++++------------ 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index f453811009ce..070bf528654a 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -1,6 +1,7 @@ name: Build and publish @prisma/prisma-schema-wasm +run-name: npm - release @prisma/prisma-schema-wasm@${{ github.event.inputs.enginesWrapperVersion }} from ${{ github.event.inputs.enginesHash }} on ${{ github.event.inputs.npmDistTag }} -concurrency: build-prisma-schema-wasm +concurrency: publish-prisma-schema-wasm on: # usually triggered via GH Actions Workflow in prisma/engines-wrapper repo @@ -30,25 +31,18 @@ jobs: ref: ${{ github.event.inputs.enginesHash }} - uses: cachix/install-nix-action@v23 - # - # Build - # - - - run: nix build .#prisma-schema-wasm - - # - # Publish - # + - name: Build + run: nix build .#prisma-schema-wasm - uses: actions/setup-node@v3 with: - node-version: '14.x' + node-version: '20.x' + registry-url: 'https://registry.npmjs.org/' - - name: Set up NPM token - run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc - - - run: | - PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) + - name: Update version in package.json & Publish @prisma/prisma-schema-wasm + run: + # Update 
version in package.json and return directory for later usage + PACKAGE_DIR=$( nix run .#renderPrismaSchemaWasmPackage ${{ github.event.inputs.enginesWrapperVersion }}) npm publish "$PACKAGE_DIR" --access public --tag ${{ github.event.inputs.npmDistTag }} env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} @@ -60,7 +54,6 @@ jobs: - name: Set current job url in SLACK_FOOTER env var if: ${{ failure() }} run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - - name: Slack Notification on Failure if: ${{ failure() }} uses: rtCamp/action-slack-notify@v2.2.1