diff --git a/Cargo.lock b/Cargo.lock index ec3c6a172c1b..e1e0ae3f8776 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2223,12 +2223,14 @@ dependencies = [ ] [[package]] -name = "json-rpc-api-build" +name = "json-rpc-api" version = "0.1.0" dependencies = [ "heck 0.4.1", "serde", "toml", + "tsify-next", + "wasm-bindgen", ] [[package]] @@ -4733,7 +4735,7 @@ dependencies = [ "async-trait", "chrono", "enumflags2", - "json-rpc-api-build", + "json-rpc-api", "jsonrpc-core", "mongodb-schema-connector", "psl", diff --git a/Cargo.toml b/Cargo.toml index a2fee6cdbe9a..10b710ea2029 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ members = [ "schema-engine/core", "schema-engine/connectors/*", "schema-engine/datamodel-renderer", - "schema-engine/json-rpc-api-build", + "schema-engine/json-rpc-api", "schema-engine/mongodb-schema-describer", "schema-engine/sql-migration-tests", "schema-engine/sql-introspection-tests", diff --git a/schema-engine/core/Cargo.toml b/schema-engine/core/Cargo.toml index 263e9cd0909d..898921e9609f 100644 --- a/schema-engine/core/Cargo.toml +++ b/schema-engine/core/Cargo.toml @@ -16,6 +16,7 @@ user-facing-errors = { workspace = true, features = [ "quaint", ] } +json-rpc = { path = "../json-rpc-api", package = "json-rpc-api" } async-trait.workspace = true chrono.workspace = true enumflags2.workspace = true @@ -28,8 +29,5 @@ tracing-subscriber.workspace = true tracing-futures.workspace = true url.workspace = true -[build-dependencies] -json-rpc-api-build = { path = "../json-rpc-api-build" } - [features] vendored-openssl = ["sql-schema-connector/vendored-openssl"] diff --git a/schema-engine/core/build.rs b/schema-engine/core/build.rs deleted file mode 100644 index 50e555a31684..000000000000 --- a/schema-engine/core/build.rs +++ /dev/null @@ -1,7 +0,0 @@ -use std::{env, path::Path}; - -fn main() { - let out_dir = env::var_os("OUT_DIR").unwrap(); - let out_dir = Path::new(&out_dir); - json_rpc_api_build::generate_rust_modules(out_dir).unwrap(); -} diff --git a/schema-engine/core/src/api.rs b/schema-engine/core/src/api.rs index 1c0cb5d59b53..1b31fef33bb6 100644 --- a/schema-engine/core/src/api.rs +++ b/schema-engine/core/src/api.rs @@ -1,6 +1,7 @@ //! The external facing programmatic API to the schema engine. -use crate::{commands, json_rpc::types::*, CoreResult}; +use crate::{commands, CoreResult}; +use json_rpc::types::*; /// The programmatic, generic, fantastic schema engine API. #[async_trait::async_trait] diff --git a/schema-engine/core/src/lib.rs b/schema-engine/core/src/lib.rs index 3c0a2bf6d6a1..9f0d69892a11 100644 --- a/schema-engine/core/src/lib.rs +++ b/schema-engine/core/src/lib.rs @@ -4,7 +4,7 @@ //! The top-level library crate for the schema engine. -include!(concat!(env!("OUT_DIR"), "/methods.rs")); +pub use json_rpc; // exposed for tests #[doc(hidden)] diff --git a/schema-engine/core/src/rpc.rs b/schema-engine/core/src/rpc.rs index b5467260f638..56a83af54b42 100644 --- a/schema-engine/core/src/rpc.rs +++ b/schema-engine/core/src/rpc.rs @@ -1,4 +1,5 @@ -use crate::{json_rpc::method_names::*, CoreError, CoreResult, GenericApi}; +use crate::{CoreError, CoreResult, GenericApi}; +use json_rpc::method_names::*; use jsonrpc_core::{types::error::Error as JsonRpcError, IoHandler, Params}; use psl::SourceFile; use std::sync::Arc; diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index b0ae38d95e4f..7d19e47659c6 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -3,10 +3,9 @@ //! 
Why this rather than using connectors directly? We must be able to use the schema engine //! without a valid schema or database connection for commands like createDatabase and diff. -use crate::{ - api::GenericApi, commands, json_rpc::types::*, parse_configuration_multi, CoreError, CoreResult, SchemaContainerExt, -}; +use crate::{api::GenericApi, commands, parse_configuration_multi, CoreError, CoreResult, SchemaContainerExt}; use enumflags2::BitFlags; +use json_rpc::types::*; use psl::{parser_database::SourceFile, PreviewFeature}; use schema_connector::{ConnectorError, ConnectorHost, IntrospectionResult, Namespaces, SchemaConnector}; use std::{ diff --git a/schema-engine/json-rpc-api-build/Cargo.toml b/schema-engine/json-rpc-api-build/Cargo.toml deleted file mode 100644 index 288832a1e19f..000000000000 --- a/schema-engine/json-rpc-api-build/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "json-rpc-api-build" -version = "0.1.0" -edition = "2021" - -[dependencies] -heck.workspace = true -serde.workspace = true -toml.workspace = true diff --git a/schema-engine/json-rpc-api-build/README.md b/schema-engine/json-rpc-api-build/README.md deleted file mode 100644 index 329b54fcb7da..000000000000 --- a/schema-engine/json-rpc-api-build/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# schema-engine/json-rpc-api-build - -The build script implementation for generating documentation and types from the -schema engine JSON-RPC API reference. - -The `methods` folder contains the API definition. This should be considered as -a _contract_ with the TypeScript CLI. - -The `src` folder contains the build script. This should be considered an -implementation detail of `migration-core`. diff --git a/schema-engine/json-rpc-api-build/methods/applyMigrations.toml b/schema-engine/json-rpc-api-build/methods/applyMigrations.toml deleted file mode 100644 index 8768c6f913ef..000000000000 --- a/schema-engine/json-rpc-api-build/methods/applyMigrations.toml +++ /dev/null @@ -1,29 +0,0 @@ -[methods.applyMigrations] -description = """ -Apply the migrations from the migrations directory to the database. - -This is the command behind `prisma migrate deploy`. -""" -requestShape = "applyMigrationsInput" -responseShape = "applyMigrationsOutput" - -[recordShapes.applyMigrationsInput] -description = "The input to the `applyMigrations` command." - -[recordShapes.applyMigrationsInput.fields.migrationsDirectoryPath] -shape= "string" -description = """ -The location of the migrations directory. -""" - -[recordShapes.applyMigrationsOutput] -description = """ -The output of the `applyMigrations` command. -""" - -[recordShapes.applyMigrationsOutput.fields.appliedMigrationNames] -description = """ -The names of the migrations that were just applied. Empty if no migration was applied. -""" -isList = true -shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/common.toml b/schema-engine/json-rpc-api-build/methods/common.toml deleted file mode 100644 index efee7dfb4de3..000000000000 --- a/schema-engine/json-rpc-api-build/methods/common.toml +++ /dev/null @@ -1,41 +0,0 @@ -# Common types - -[enumShapes.DatasourceParam] -description = """ -The path to a live database taken as input. For flexibility, this can be Prisma schemas as strings, or only the -connection string. See variants. 
-""" - -[enumShapes.DatasourceParam.variants.Schema] -shape = "SchemasContainer" - -[enumShapes.DatasourceParam.variants.ConnectionString] -shape = "UrlContainer" - -[recordShapes.SchemasContainer] -description = "A container that holds multiple Prisma schema files." -fields.files.shape = "SchemaContainer" -fields.files.isList = true - -[recordShapes.SchemaContainer] -description = "A container that holds the path and the content of a Prisma schema file." - -fields.content.description = "The content of the Prisma schema file." -fields.content.shape = "string" - -fields.path.shape = "string" -fields.path.description = "The file name of the Prisma schema file." - -[recordShapes.SchemasWithConfigDir] -description = "A list of Prisma schema files with a config directory." - -fields.files.description = "A list of Prisma schema files." -fields.files.shape = "SchemaContainer" -fields.files.isList = true - -fields.configDir.description = "An optional directory containing the config files such as SSL certificates." -fields.configDir.shape = "string" - -[recordShapes.UrlContainer] -description = "An object with a `url` field." -fields.url.shape = "string" \ No newline at end of file diff --git a/schema-engine/json-rpc-api-build/methods/createDatabase.toml b/schema-engine/json-rpc-api-build/methods/createDatabase.toml deleted file mode 100644 index aa58b4f602c2..000000000000 --- a/schema-engine/json-rpc-api-build/methods/createDatabase.toml +++ /dev/null @@ -1,12 +0,0 @@ -[methods.createDatabase] -description = """ -Create the logical database from the Prisma schema. -""" -requestShape = "CreateDatabaseParams" -responseShape = "CreateDatabaseResult" - -[recordShapes.CreateDatabaseParams] -fields.datasource.shape = "DatasourceParam" - -[recordShapes.CreateDatabaseResult] -fields.databaseName.shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/createMigration.toml b/schema-engine/json-rpc-api-build/methods/createMigration.toml deleted file mode 100644 index f6c89e5814cc..000000000000 --- a/schema-engine/json-rpc-api-build/methods/createMigration.toml +++ /dev/null @@ -1,46 +0,0 @@ -[methods.createMigration] -description = """ -Create the next migration in the migrations history. If `draft` is false and -there are no unexecutable steps, it will also apply the newly created -migration. - -**Note**: This will use the shadow database on the connectors where we need -one. -""" -requestShape = "createMigrationInput" -responseShape = "createMigrationOutput" - -[recordShapes.createMigrationInput] -description = "The input to the `createMigration` command." - -[recordShapes.createMigrationInput.fields.draft] -description = "If true, always generate a migration, but do not apply." -shape = "bool" - -[recordShapes.createMigrationInput.fields.migrationName] -description = "The user-given name for the migration. This will be used for the migration directory." -shape = "string" - -[recordShapes.createMigrationInput.fields.migrationsDirectoryPath] -description = "The filesystem path of the migrations directory to use." -shape = "string" - -[recordShapes.createMigrationInput.fields.schema] -description = "The Prisma schema files to use as a target for the generated migration." -shape = "SchemasContainer" - -[recordShapes.createMigrationOutput] -description = "The output of the `creatMigration` command." - -[recordShapes.createMigrationOutput.fields.generatedMigrationName] -description = """ -The name of the newly generated migration directory, if any. - -generatedMigrationName will be null if: - -1. 
The migration we generate would be empty, **AND** -2. the `draft` param was not true, because in that case the engine would still generate an empty - migration script. -""" -isNullable = true -shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/dbExecute.toml b/schema-engine/json-rpc-api-build/methods/dbExecute.toml deleted file mode 100644 index e60342761761..000000000000 --- a/schema-engine/json-rpc-api-build/methods/dbExecute.toml +++ /dev/null @@ -1,25 +0,0 @@ -[methods.dbExecute] -description = """ -Execute a database script directly on the specified live database.\n\nNote that this may not be -defined on all connectors. -""" -requestShape = "DbExecuteParams" -responseShape = "DbExecuteResult" - -[recordShapes.DbExecuteParams] -description = "The type of params accepted by dbExecute." -fields.datasourceType.description = "The location of the live database to connect to." -fields.datasourceType.shape = "DbExecuteDatasourceType" -fields.script.description = "The input script." -fields.script.shape = "string" - -[recordShapes.DbExecuteResult] -description = "The type of results returned by dbExecute." - -[enumShapes.DbExecuteDatasourceType] -description = "The location of the live database to connect to." -variants.schema.description = "Prisma schema files and content to take the datasource URL from." -variants.schema.shape = "SchemasWithConfigDir" - -variants.url.description = "The URL of the database to run the command on." -variants.url.shape = "UrlContainer" diff --git a/schema-engine/json-rpc-api-build/methods/debugPanic.toml b/schema-engine/json-rpc-api-build/methods/debugPanic.toml deleted file mode 100644 index 8e3ad484eb54..000000000000 --- a/schema-engine/json-rpc-api-build/methods/debugPanic.toml +++ /dev/null @@ -1,8 +0,0 @@ -[methods.debugPanic] -description = "Make the schema engine panic. Only useful to test client error handling." -requestShape = "debugPanicInput" -responseShape = "debugPanicOutput" - -[recordShapes.debugPanicInput] - -[recordShapes.debugPanicOutput] diff --git a/schema-engine/json-rpc-api-build/methods/devDiagnostic.toml b/schema-engine/json-rpc-api-build/methods/devDiagnostic.toml deleted file mode 100644 index bfe28cd85c5c..000000000000 --- a/schema-engine/json-rpc-api-build/methods/devDiagnostic.toml +++ /dev/null @@ -1,39 +0,0 @@ -[methods.devDiagnostic] -description = """ -The method called at the beginning of `migrate dev` to decide the course of -action based on the current state of the workspace. - -It acts as a wrapper around diagnoseMigrationHistory. Its role is to interpret -the diagnostic output, and translate it to a concrete action to be performed by -the CLI. -""" -requestShape = "devDiagnosticInput" -responseShape = "devDiagnosticOutput" - -[recordShapes.devActionReset.fields.reason] -description = "Why do we need to reset?" -shape = "string" - -[recordShapes.devDiagnosticInput] -description = "The request type for `devDiagnostic`." - -[recordShapes.devDiagnosticInput.fields.migrationsDirectoryPath] -description = "The location of the migrations directory." -shape = "string" - -[recordShapes.devDiagnosticOutput] -description = "The response type for `devDiagnostic`." - -[recordShapes.devDiagnosticOutput.fields.action] -description = "The suggested course of action for the CLI." -shape = "devAction" - -[enumShapes.devAction] -description = "A suggested action for the CLI `migrate dev` command." - -[enumShapes.devAction.variants.reset] -description = "Reset the database." 
-shape = "devActionReset" - -[enumShapes.devAction.variants.createMigration] -description = "Proceed to the next step" diff --git a/schema-engine/json-rpc-api-build/methods/diagnoseMigrationHistory.toml b/schema-engine/json-rpc-api-build/methods/diagnoseMigrationHistory.toml deleted file mode 100644 index d2330262c560..000000000000 --- a/schema-engine/json-rpc-api-build/methods/diagnoseMigrationHistory.toml +++ /dev/null @@ -1,70 +0,0 @@ -[methods.diagnoseMigrationHistory] -description = """ -Read the contents of the migrations directory and the migrations table, -and returns their relative statuses. At this stage, the migration -engine only reads, it does not write to the database nor the migrations -directory, nor does it use a shadow database. -""" -requestShape = "diagnoseMigrationHistoryInput" -responseShape = "diagnoseMigrationHistoryOutput" - -[recordShapes.diagnoseMigrationHistoryInput] -description = "The request params for the `diagnoseMigrationHistory` method." -fields.migrationsDirectoryPath.description = "The path to the root of the migrations directory." -fields.migrationsDirectoryPath.shape = "string" -fields.optInToShadowDatabase.description = """ -Whether creating a shadow database is allowed. -""" -fields.optInToShadowDatabase.shape = "bool" - -[recordShapes.diagnoseMigrationHistoryOutput] -description = """ -The result type for `diagnoseMigrationHistory` responses. -""" - -[recordShapes.diagnoseMigrationHistoryOutput.fields.editedMigrationNames] -description = """ -The names of the migrations for which the checksum of the script in the -migration directory does not match the checksum of the applied migration -in the database. -""" -isList = true -shape = "string" - -[recordShapes.diagnoseMigrationHistoryOutput.fields.failedMigrationNames] -description = """ -The names of the migrations that are currently in a failed state in the migrations table. -""" -isList = true -shape = "string" - -[recordShapes.diagnoseMigrationHistoryOutput.fields.hasMigrationsTable] -description = "Is the migrations table initialized/present in the database?" -shape = "bool" - -[recordShapes.diagnoseMigrationHistoryOutput.fields.history] -description = """ -The current status of the migration history of the database -relative to migrations directory. `null` if they are in sync and up -to date. -""" -isNullable = true -shape = "HistoryDiagnostic" - -[enumShapes.HistoryDiagnostic] -description = """ -A diagnostic returned by `diagnoseMigrationHistory` when looking at the -database migration history in relation to the migrations directory. -""" - -[enumShapes.HistoryDiagnostic.variants.MigrationsDirectoryIsBehind] -[enumShapes.HistoryDiagnostic.variants.HistoriesDiverge] - -[enumShapes.HistoryDiagnostic.variants.DatabaseIsBehind] -description = """ -There are migrations in the migrations directory that have not been applied to -the database yet. -""" -shape = "DatabaseIsBehindFields" - -[recordShapes.DatabaseIsBehindFields] diff --git a/schema-engine/json-rpc-api-build/methods/diff.toml b/schema-engine/json-rpc-api-build/methods/diff.toml deleted file mode 100644 index 5e3e380d90a0..000000000000 --- a/schema-engine/json-rpc-api-build/methods/diff.toml +++ /dev/null @@ -1,123 +0,0 @@ -[methods.diff] -description = """ -Compares two databases schemas from two arbitrary sources, and display the -difference as either a human-readable summary, or an executable script that can -be passed to dbExecute. 
- -Connection to a shadow database is only necessary when either the `from` or the -`to` params is a migrations directory. - -Diffs have a _direction_. Which source is `from` and which is `to` matters. The -resulting diff should be thought as a migration from the schema in `from` to -the schema in `to`. - -By default, we output a human-readable diff. If you want an executable script, -pass the `"script": true` param. -""" -requestShape = "diffParams" -responseShape = "diffResult" - -[recordShapes.diffParams] -description = "The type of params for the `diff` method." -# Example: generate a new migration -example = """ -{ - "from": { - "tag": "migrations", - "path": "./prisma/migrations" - }, - "to": { - "tag": "schemaDatamodel", - "schema": "./prisma/schema.prisma", - } - "shadowDatabaseUrl": "mysql://test/test" -} -""" - -[recordShapes.diffParams.fields.from] -description = """ -The source of the schema to consider as a _starting point_. -""" -shape = "DiffTarget" - -[recordShapes.diffParams.fields.to] -description = """ -The source of the schema to consider as a _destination_, or the desired -end-state. -""" -shape = "DiffTarget" - -[recordShapes.diffParams.fields.shadowDatabaseUrl] -description = """ -The URL to a live database to use as a shadow database. The schema and data on -that database will be wiped during diffing. - -This is only necessary when one of `from` or `to` is referencing a migrations -directory as a source for the schema. -""" -isNullable = true -shape = "string" - -[recordShapes.diffParams.fields.script] -description = """ -By default, the response will contain a human-readable diff. If you want an -executable script, pass the `"script": true` param. -""" -shape = "bool" - -[recordShapes.diffParams.fields.exitCode] -description = """ -Whether the --exit-code param was passed. - -If this is set, the engine will return exitCode = 2 in the diffResult in case the diff is -non-empty. Other than this, it does not change the behaviour of the command. -""" -isNullable = true -shape = "bool" - -[recordShapes.diffResult] -description = "The result type for the `diff` method." - -[recordShapes.diffResult.fields.exitCode] -description = "The exit code that the CLI should return." -shape = "u32" - -[enumShapes.DiffTarget] -description = "A supported source for a database schema to diff in the `diff` command." - -[enumShapes.DiffTarget.variants.empty] -description = "An empty schema." - -[enumShapes.DiffTarget.variants.schemaDatasource] -description = """ -The path to a Prisma schema. The _datasource url_ will be considered, and the -live database it points to introspected for its schema. -""" -shape = "SchemasWithConfigDir" - -[enumShapes.DiffTarget.variants.schemaDatamodel] -description = """ -The path to a Prisma schema. The contents of the schema itself will be -considered. This source does not need any database connection. -""" -shape = "SchemasContainer" - -[enumShapes.DiffTarget.variants.url] -description = """ -The url to a live database. Its schema will be considered. - -This will cause the schema engine to connect to the database and read from it. -It will not write. -""" -shape = "UrlContainer" - -[enumShapes.DiffTarget.variants.migrations] -description = """ -The path to a migrations directory of the shape expected by Prisma Migrate. The -migrations will be applied to a **shadow database**, and the resulting schema -considered for diffing. 
-""" -shape = "PathContainer" - -[recordShapes.PathContainer] -fields.path.shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/ensureConnectionValidity.toml b/schema-engine/json-rpc-api-build/methods/ensureConnectionValidity.toml deleted file mode 100644 index e042691e4867..000000000000 --- a/schema-engine/json-rpc-api-build/methods/ensureConnectionValidity.toml +++ /dev/null @@ -1,12 +0,0 @@ -[methods.ensureConnectionValidity] -description = """ -Make sure the schema engine can connect to the database from the Prisma schema. -""" -requestShape = "EnsureConnectionValidityParams" -responseShape = "EnsureConnectionValidityResult" - -[recordShapes.EnsureConnectionValidityParams] -fields.datasource.shape = "DatasourceParam" - -[recordShapes.EnsureConnectionValidityResult] - diff --git a/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml b/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml deleted file mode 100644 index 5875e5695f9e..000000000000 --- a/schema-engine/json-rpc-api-build/methods/evaluateDataLoss.toml +++ /dev/null @@ -1,66 +0,0 @@ -[methods.evaluateDataLoss] -description = """ -Development command for migrations. Evaluate the data loss induced by the next -migration the engine would generate on the main database. - -At this stage, the engine does not create or mutate anything in the database -nor in the migrations directory. - -This is part of the `migrate dev` flow. - -**Note**: the engine currently assumes the main database schema is up-to-date -with the migration history. -""" -requestShape = "evaluateDataLossInput" -responseShape = "evaluateDataLossOutput" - -[recordShapes.evaluateDataLossInput] -description = "The input to the `evaluateDataLoss` command." - -[recordShapes.evaluateDataLossInput.fields.migrationsDirectoryPath] -description = "The location of the migrations directory." -shape = "string" - -[recordShapes.evaluateDataLossInput.fields.schema] -description = "The prisma schema files to migrate to." -shape = "SchemasContainer" - -[recordShapes.evaluateDataLossOutput] -description = """ -The output of the `evaluateDataLoss` command. -""" - -[recordShapes.evaluateDataLossOutput.fields.migrationSteps] -description = """The number migration steps that would be generated. If this is empty, we -wouldn't generate a new migration, unless the `draft` option is -passed. -""" -shape = "u32" - -[recordShapes.evaluateDataLossOutput.fields.unexecutableSteps] -description = """ -Steps that cannot be executed on the local database in the -migration that would be generated. -""" -isList = true -shape = "migrationFeedback" - -[recordShapes.evaluateDataLossOutput.fields.warnings] -description = """ -Destructive change warnings for the local database. These are the -warnings *for the migration that would be generated*. This does not -include other potentially yet unapplied migrations. -""" -isList = true -shape = "migrationFeedback" - -[recordShapes.migrationFeedback] -description = "A data loss warning or an unexecutable migration error, associated with the step that triggered it." - -[recordShapes.migrationFeedback.fields.message] -description = "The human-readable message." -shape = "string" - -[recordShapes.migrationFeedback.fields.stepIndex] -description = "The index of the step this pertains to." 
-shape = "u32" diff --git a/schema-engine/json-rpc-api-build/methods/getDatabaseVersion.toml b/schema-engine/json-rpc-api-build/methods/getDatabaseVersion.toml deleted file mode 100644 index c1955e442fa8..000000000000 --- a/schema-engine/json-rpc-api-build/methods/getDatabaseVersion.toml +++ /dev/null @@ -1,10 +0,0 @@ -[methods.getDatabaseVersion] -description = "Get the database version for error reporting." -requestShape = "getDatabaseVersionInput" -responseShape = "getDatabaseVersionOutput" - -[recordShapes.getDatabaseVersionInput] -fields.datasource.shape = "DatasourceParam" - -[recordShapes.getDatabaseVersionOutput.fields.version] -shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/introspect.toml b/schema-engine/json-rpc-api-build/methods/introspect.toml deleted file mode 100644 index 1100d25b1cb7..000000000000 --- a/schema-engine/json-rpc-api-build/methods/introspect.toml +++ /dev/null @@ -1,50 +0,0 @@ -[methods.introspect] -description = "Introspect the database (db pull)" -requestShape = "introspectParams" -responseShape = "introspectResult" - -[recordShapes.introspectParams] -description = "Params type for the introspect method." - -[recordShapes.introspectParams.fields.schema] -shape = "SchemasContainer" - -[recordShapes.introspectParams.fields.baseDirectoryPath] -shape = "string" - -[recordShapes.introspectParams.fields.force] -shape = "bool" - -[recordShapes.introspectParams.fields.compositeTypeDepth] -shape = "isize" - -[recordShapes.introspectParams.fields.namespaces] -shape = "string" -isList = true -isNullable = true - -[recordShapes.introspectResult] -description = "Result type for the introspect method." - -[recordShapes.introspectResult.fields.schema] -shape = "SchemasContainer" - -[recordShapes.introspectResult.fields.warnings] -shape = "string" -isNullable = true - -[recordShapes.introspectResult.fields.views] -shape = "introspectionView" -isNullable = true -isList = true - -[recordShapes.introspectionView] - -[recordShapes.introspectionView.fields.schema] -shape = "string" - -[recordShapes.introspectionView.fields.name] -shape = "string" - -[recordShapes.introspectionView.fields.definition] -shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/introspectSql.toml b/schema-engine/json-rpc-api-build/methods/introspectSql.toml deleted file mode 100644 index 19eb888da327..000000000000 --- a/schema-engine/json-rpc-api-build/methods/introspectSql.toml +++ /dev/null @@ -1,68 +0,0 @@ -[methods.introspectSql] -description = "Introspect a SQL query and returns type information" -requestShape = "introspectSqlParams" -responseShape = "introspectSqlResult" - -# Input - -[recordShapes.introspectSqlParams] -description = "Params type for the introspectSql method." - -[recordShapes.introspectSqlParams.fields.url] -shape = "string" - -[recordShapes.introspectSqlParams.fields.queries] -shape = "sqlQueryInput" -isList = true - -# Result - -[recordShapes.introspectSqlResult] -description = "Result type for the introspectSql method." 
- -[recordShapes.introspectSqlResult.fields.queries] -shape = "sqlQueryOutput" -isList = true - -# Containers - -[recordShapes.sqlQueryInput] -[recordShapes.sqlQueryInput.fields.name] -shape = "string" -[recordShapes.sqlQueryInput.fields.source] -shape = "string" - -[recordShapes.sqlQueryOutput] -[recordShapes.sqlQueryOutput.fields.name] -shape = "string" -[recordShapes.sqlQueryOutput.fields.source] -shape = "string" -[recordShapes.sqlQueryOutput.fields.documentation] -isNullable = true -shape = "string" -[recordShapes.sqlQueryOutput.fields.parameters] -shape = "sqlQueryParameterOutput" -isList = true -[recordShapes.sqlQueryOutput.fields.resultColumns] -shape = "sqlQueryColumnOutput" -isList = true - -[recordShapes.sqlQueryParameterOutput] -[recordShapes.sqlQueryParameterOutput.fields.name] -shape = "string" -[recordShapes.sqlQueryParameterOutput.fields.typ] -shape = "string" -[recordShapes.sqlQueryParameterOutput.fields.documentation] -isNullable = true -shape = "string" -[recordShapes.sqlQueryParameterOutput.fields.nullable] -shape = "bool" - -[recordShapes.sqlQueryColumnOutput] -[recordShapes.sqlQueryColumnOutput.fields.name] -shape = "string" -[recordShapes.sqlQueryColumnOutput.fields.typ] -shape = "string" -[recordShapes.sqlQueryColumnOutput.fields.nullable] -shape = "bool" - diff --git a/schema-engine/json-rpc-api-build/methods/listMigrationDirectories.toml b/schema-engine/json-rpc-api-build/methods/listMigrationDirectories.toml deleted file mode 100644 index 247d14f908c2..000000000000 --- a/schema-engine/json-rpc-api-build/methods/listMigrationDirectories.toml +++ /dev/null @@ -1,20 +0,0 @@ -[methods.listMigrationDirectories] -description = "List the names of the migrations in the migrations directory." -requestShape = "listMigrationDirectoriesInput" -responseShape = "listMigrationDirectoriesOutput" - -[recordShapes.listMigrationDirectoriesInput] -description = """ -The input to the `listMigrationDirectories` command. -""" - -[recordShapes.listMigrationDirectoriesInput.fields.migrationsDirectoryPath] -description = "The location of the migrations directory." -shape = "string" - -[recordShapes.listMigrationDirectoriesOutput.fields.migrations] -description = """ -The names of the migrations in the migration directory. Empty if no migrations are found. -""" -isList = true -shape = "string" diff --git a/schema-engine/json-rpc-api-build/methods/markMigrationApplied.toml b/schema-engine/json-rpc-api-build/methods/markMigrationApplied.toml deleted file mode 100644 index ce4522dbb52d..000000000000 --- a/schema-engine/json-rpc-api-build/methods/markMigrationApplied.toml +++ /dev/null @@ -1,27 +0,0 @@ -[methods.markMigrationApplied] -description = """Mark a migration as applied in the migrations table. - -There are two possible outcomes: - -- The migration is already in the table, but in a failed state. In this case, we will mark it - as rolled back, then create a new entry. -- The migration is not in the table. We will create a new entry in the migrations table. The - `started_at` and `finished_at` will be the same. -- If it is already applied, we return a user-facing error. -""" -requestShape = "markMigrationAppliedInput" -responseShape = "markMigrationAppliedOutput" - -[recordShapes.markMigrationAppliedInput] -description = "The names of the migrations in the migration directory. Empty if no migrations are found." - -[recordShapes.markMigrationAppliedInput.fields.migrationName] -description = "The name of the migration to mark applied." 
-shape = "string" - -[recordShapes.markMigrationAppliedInput.fields.migrationsDirectoryPath] -description = "The path to the root of the migrations directory." -shape = "string" - -[recordShapes.markMigrationAppliedOutput] -description = "The output of the `markMigrationApplied` command." diff --git a/schema-engine/json-rpc-api-build/methods/markMigrationRolledBack.toml b/schema-engine/json-rpc-api-build/methods/markMigrationRolledBack.toml deleted file mode 100644 index c5936609b48f..000000000000 --- a/schema-engine/json-rpc-api-build/methods/markMigrationRolledBack.toml +++ /dev/null @@ -1,19 +0,0 @@ -[methods.markMigrationRolledBack] -description = """ -Mark an existing failed migration as rolled back in the migrations table. It -will still be there, but ignored for all purposes except as audit trail. -""" -requestShape = "markMigrationRolledBackInput" -responseShape = "markMigrationRolledBackOutput" - -[recordShapes.markMigrationRolledBackInput] -description = """ -The input to the `markMigrationRolledBack` command. -""" - -[recordShapes.markMigrationRolledBackInput.fields.migrationName] -description = "The name of the migration to mark rolled back." -shape = "string" - -[recordShapes.markMigrationRolledBackOutput] -description = "The output of the `markMigrationRolledBack` command." diff --git a/schema-engine/json-rpc-api-build/methods/reset.toml b/schema-engine/json-rpc-api-build/methods/reset.toml deleted file mode 100644 index 72872959bbf0..000000000000 --- a/schema-engine/json-rpc-api-build/methods/reset.toml +++ /dev/null @@ -1,17 +0,0 @@ -[methods.reset] -description = """ -Try to make the database empty: no data and no schema. On most connectors, this -is implemented by dropping and recreating the database. If that fails (most -likely because of insufficient permissions), the engine attemps a \"best effort -reset\" by inspecting the contents of the database and dropping them -individually. - -Drop and recreate the database. The migrations will not be applied, as it would -overlap with `applyMigrations`. -""" -requestShape = "resetInput" -responseShape = "resetOutput" - -[recordShapes.resetInput] - -[recordShapes.resetOutput] diff --git a/schema-engine/json-rpc-api-build/methods/schemaPush.toml b/schema-engine/json-rpc-api-build/methods/schemaPush.toml deleted file mode 100644 index b668a6f21b98..000000000000 --- a/schema-engine/json-rpc-api-build/methods/schemaPush.toml +++ /dev/null @@ -1,32 +0,0 @@ -[methods.schemaPush] -description = "The command behind `db push`." -requestShape = "schemaPushInput" -responseShape = "schemaPushOutput" - -[recordShapes.schemaPushInput] -description = "Request params for the `schemaPush` method." - -[recordShapes.schemaPushInput.fields.force] -description = "Push the schema ignoring destructive change warnings." -shape = "bool" - -[recordShapes.schemaPushInput.fields.schema] -description = "The Prisma schema files." -shape = "SchemasContainer" - -[recordShapes.schemaPushOutput] -description = "Response result for the `schemaPush` method." - -[recordShapes.schemaPushOutput.fields.executedSteps] -description = "How many migration steps were executed." -shape = "u32" - -[recordShapes.schemaPushOutput.fields.unexecutable] -description = "Steps that cannot be executed in the current state of the database." -isList = true -shape = "string" - -[recordShapes.schemaPushOutput.fields.warnings] -description = "Destructive change warnings." 
-isList = true
-shape = "string"
diff --git a/schema-engine/json-rpc-api-build/src/error.rs b/schema-engine/json-rpc-api-build/src/error.rs
deleted file mode 100644
index f0df58637fab..000000000000
--- a/schema-engine/json-rpc-api-build/src/error.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use std::{backtrace::Backtrace, error::Error as StdError, fmt::Debug};
-
-pub type CrateResult = Result<(), Error>;
-
-pub struct Error {
-    source: Option<Box<dyn StdError>>,
-    bt: Backtrace,
-    message: Option<String>,
-}
-
-impl<T> From<T> for Error
-where
-    T: std::error::Error + 'static,
-{
-    fn from(src: T) -> Self {
-        Error {
-            message: Some(src.to_string()),
-            source: Some(Box::new(src)),
-            bt: Backtrace::force_capture(),
-        }
-    }
-}
-
-impl Debug for Error {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let mut src: Option<&dyn StdError> = self.source.as_deref();
-        let mut indentation_levels = 0;
-
-        if let Some(message) = &self.message {
-            f.write_str(message)?;
-        }
-
-        while let Some(source) = src {
-            f.write_str("\n")?;
-
-            for _ in 0..=indentation_levels {
-                f.write_str(" ")?;
-            }
-
-            f.write_fmt(format_args!("Caused by: {source}\n"))?;
-
-            indentation_levels += 1;
-            src = source.source();
-        }
-
-        f.write_fmt(format_args!("{:?}\n", self.bt))
-    }
-}
diff --git a/schema-engine/json-rpc-api-build/src/lib.rs b/schema-engine/json-rpc-api-build/src/lib.rs
deleted file mode 100644
index ec01a8d06555..000000000000
--- a/schema-engine/json-rpc-api-build/src/lib.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-mod error;
-mod rust_crate;
-
-use self::error::CrateResult;
-use serde::Deserialize;
-use std::{
-    collections::{BTreeMap, HashMap},
-    path::Path,
-};
-
-// Note: the easiest way to update the generated JSON-RPC API types is to comment out every line in `schema-engine/core/src/lib.rs`
-// but the `include!` macro invocation, then run `cargo build -p schema-core`.
-pub fn generate_rust_modules(out_dir: &Path) -> CrateResult {
-    let api_defs_root = concat!(env!("CARGO_MANIFEST_DIR"), "/methods");
-
-    // https://doc.rust-lang.org/cargo/reference/build-scripts.html
-    println!("cargo:rerun-if-changed={api_defs_root}");
-
-    let entries = std::fs::read_dir(api_defs_root)?;
-    let mut api = Api::default();
-
-    for entry in entries {
-        let entry = entry?;
-        if !entry.file_type()?.is_file() {
-            continue;
-        }
-
-        let contents = std::fs::read_to_string(entry.path())?;
-        eprintln!("Merging {}", entry.path().file_name().unwrap().to_string_lossy());
-        let api_fragment: Api = toml::from_str(&contents)?;
-
-        merge(&mut api, api_fragment);
-    }
-
-    validate(&api);
-
-    rust_crate::generate_rust_crate(out_dir, &api)?;
-
-    eprintln!("ok: definitions generated");
-
-    Ok(())
-}
-
-fn validate(api: &Api) {
-    let mut errs: Vec<String> = Vec::new();
-
-    for (method_name, method) in &api.methods {
-        if !shape_exists(&method.request_shape, api) {
-            errs.push(format!("Request shape for {method_name} does not exist"))
-        }
-
-        if !shape_exists(&method.response_shape, api) {
-            errs.push(format!("Response shape for {method_name} does not exist"))
-        }
-    }
-
-    for (record_name, record_shape) in &api.record_shapes {
-        for (field_name, field) in &record_shape.fields {
-            if !shape_exists(&field.shape, api) {
-                errs.push(format!("Field shape for {record_name}.{field_name} does not exist."))
-            }
-        }
-    }
-
-    for (enum_name, enum_shape) in &api.enum_shapes {
-        for (variant_name, variant) in &enum_shape.variants {
-            if let Some(shape) = variant.shape.as_ref() {
-                if !shape_exists(shape, api) {
-                    errs.push(format!(
-                        "Enum variant shape for {enum_name}.{variant_name} does not exist."
-                    ))
-                }
-            }
-        }
-    }
-
-    if !errs.is_empty() {
-        for err in errs {
-            eprintln!("{err}");
-        }
-        std::process::exit(1);
-    }
-}
-
-fn shape_exists(shape: &str, api: &Api) -> bool {
-    let builtin_scalars = ["string", "bool", "u32", "isize", "serde_json::Value"];
-
-    if builtin_scalars.contains(&shape) {
-        return true;
-    }
-
-    if api.enum_shapes.contains_key(shape) {
-        return true;
-    }
-
-    if api.record_shapes.contains_key(shape) {
-        return true;
-    }
-
-    false
-}
-
-fn merge(api: &mut Api, new_fragment: Api) {
-    for (method_name, method) in new_fragment.methods {
-        assert!(api.methods.insert(method_name, method).is_none());
-    }
-
-    for (record_name, record) in new_fragment.record_shapes {
-        assert!(api.record_shapes.insert(record_name, record).is_none());
-    }
-
-    for (enum_name, enum_d) in new_fragment.enum_shapes {
-        assert!(api.enum_shapes.insert(enum_name, enum_d).is_none());
-    }
-}
-
-// Make sure #[serde(deny_unknown_fields)] is on all struct types here.
-#[derive(Debug, Deserialize, Default)]
-#[serde(deny_unknown_fields)]
-struct Api {
-    #[serde(rename = "recordShapes", default)]
-    record_shapes: HashMap<String, RecordShape>,
-    #[serde(rename = "enumShapes", default)]
-    enum_shapes: HashMap<String, EnumShape>,
-    #[serde(default)]
-    methods: HashMap<String, Method>,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct RecordShape {
-    description: Option<String>,
-    #[serde(default)]
-    fields: BTreeMap<String, RecordField>,
-    example: Option<String>,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct RecordField {
-    description: Option<String>,
-    #[serde(rename = "isList", default)]
-    is_list: bool,
-    #[serde(rename = "isNullable", default)]
-    is_nullable: bool,
-    shape: String,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct EnumVariant {
-    description: Option<String>,
-    /// In case there is no shape, it just means the variant has no associated data.
-    shape: Option<String>,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct EnumShape {
-    description: Option<String>,
-    variants: HashMap<String, EnumVariant>,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct Method {
-    description: Option<String>,
-    #[serde(rename = "requestShape")]
-    request_shape: String,
-    #[serde(rename = "responseShape")]
-    response_shape: String,
-}
diff --git a/schema-engine/json-rpc-api-build/src/rust_crate.rs b/schema-engine/json-rpc-api-build/src/rust_crate.rs
deleted file mode 100644
index 27a5a5ad7ac6..000000000000
--- a/schema-engine/json-rpc-api-build/src/rust_crate.rs
+++ /dev/null
@@ -1,163 +0,0 @@
-use crate::{Api, CrateResult};
-use heck::*;
-use std::{borrow::Cow, fs::File, io::Write as _, path::Path};
-
-pub(crate) fn generate_rust_crate(out_dir: &Path, api: &Api) -> CrateResult {
-    let librs = out_dir.join("methods.rs");
-    let mut librs = std::io::BufWriter::new(File::create(librs)?);
-    let mut method_names: Vec<&str> = api.methods.keys().map(String::as_str).collect();
-    method_names.sort_unstable();
-
-    librs.write_all(b"pub mod json_rpc {\n")?;
-    librs.write_all(b"//! The JSON-RPC API definition.\n//!\n//! ## Methods\n//!\n")?;
-
-    for method_name in &method_names {
-        let method = &api.methods[*method_name];
-
-        writeln!(librs, "//!\n//! ### 🔌 {method_name}\n")?;
-        writeln!(
-            librs,
-            "//! ➡️ [{request_name}](./types/struct.{request_name}.html)\n//!",
-            request_name = method.request_shape.to_upper_camel_case()
-        )?;
-        writeln!(
-            librs,
-            "//! ↩️ [{response_name}](./types/struct.{response_name}.html)\n//!",
-            response_name = method.response_shape.to_upper_camel_case()
-        )?;
-
-        if let Some(description) = &method.description {
-            for line in description.lines() {
-                writeln!(librs, "//! {line}")?;
-            }
-        }
-    }
-
-    librs.write_all(
-        b"/// String constants for method names.\npub mod method_names {\n/// Exhaustive list of the names of all JSON-RPC methods.\npub const METHOD_NAMES: &[&str] = &[",
-    )?;
-
-    for method_name in &method_names {
-        writeln!(librs, "    \"{method_name}\",")?;
-    }
-
-    writeln!(librs, "];")?;
-
-    for method_name in &method_names {
-        writeln!(
-            librs,
-            "/// {method_name}\npub const {}: &str = \"{method_name}\";",
-            method_name.to_snake_case().to_shouty_snake_case()
-        )?;
-    }
-
-    librs.write_all(b"}\n")?; // close method_names
-
-    generate_types_rs(&mut librs, api)?;
-
-    librs.write_all(b"}\n")?;
-
-    Ok(())
-}
-
-fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult {
-    file.write_all(
-        b"/// API type definitions used by the methods.\n#[allow(missing_docs)] pub mod types {\nuse serde::{Serialize, Deserialize};\n\n",
-    )?;
-
-    for (type_name, record_type) in &api.record_shapes {
-        if let Some(description) = &record_type.description {
-            for line in description.lines() {
-                writeln!(file, "/// {line}")?;
-            }
-        }
-
-        if let Some(example) = &record_type.example {
-            file.write_all(b"/// ### Example\n///\n/// ```ignore")?;
-            for line in example.lines() {
-                file.write_all(b"\n/// ")?;
-                file.write_all(line.as_bytes())?;
-            }
-            file.write_all(b"\n/// ```\n")?;
-        }
-
-        writeln!(file, "#[derive(Serialize, Deserialize, Debug)]",)?;
-
-        writeln!(file, "pub struct {} {{", rustify_type_name(type_name))?;
-
-        for (field_name, field) in &record_type.fields {
-            if let Some(description) = &field.description {
-                for line in description.lines() {
-                    writeln!(file, "    /// {line}")?;
-                }
-            }
-            let type_name = rustify_type_name(&field.shape);
-            let type_name: Cow<'static, str> = match (field.is_list, field.is_nullable) {
-                (true, true) => format!("Option<Vec<{type_name}>>").into(),
-                (false, true) => format!("Option<{type_name}>").into(),
-                (true, false) => format!("Vec<{type_name}>").into(),
-                (false, false) => type_name,
-            };
-            let field_name_sc = field_name.to_snake_case();
-            if &field_name_sc != field_name {
-                writeln!(file, "    ///\n    /// JSON name: {field_name}")?;
-                writeln!(file, "    #[serde(rename = \"{field_name}\")]")?;
-            }
-
-            writeln!(file, "    pub {field_name_sc}: {type_name},")?;
-        }
-        writeln!(file, "}}\n")?;
-    }
-
-    for (type_name, variants) in &api.enum_shapes {
-        if let Some(description) = &variants.description {
-            for line in description.lines() {
-                writeln!(file, "/// {line}")?;
-            }
-        }
-
-        writeln!(
-            file,
-            "#[derive(Serialize, Deserialize, Debug)]\n#[serde(tag = \"tag\")]\npub enum {} {{",
-            rustify_type_name(type_name)
-        )?;
-
-        for (variant_name, variant) in &variants.variants {
-            if let Some(description) = &variant.description {
-                for line in description.lines() {
-                    writeln!(file, "/// {line}")?;
-                }
-            }
-
-            let cc_variant_name = variant_name.to_upper_camel_case();
-
-            if cc_variant_name.as_str() != variant_name {
-                writeln!(file, "///\n/// JSON name: {variant_name}")?;
-                writeln!(file, "#[serde(rename = \"{variant_name}\")]")?;
-            }
-
-            if let Some(shape) = &variant.shape {
-                writeln!(file, "    {cc_variant_name}({}),", rustify_type_name(shape))?;
-            } else {
-                writeln!(file, "    {cc_variant_name},")?;
-            }
-        }
-
-        file.write_all(b"}\n")?;
-    }
-
-    file.write_all(b"}\n")?;
-
-    Ok(())
-}
-
-fn rustify_type_name(name: &str) -> Cow<'static, str> {
-    match name {
-        "bool" => Cow::Borrowed("bool"),
-        "u32" => Cow::Borrowed("u32"),
-        "isize" => Cow::Borrowed("isize"),
-        "string" => Cow::Borrowed("String"),
-        "serde_json::Value" => Cow::Borrowed("serde_json::Value"),
-        other => other.to_upper_camel_case().into(),
-    }
-}
diff --git a/schema-engine/json-rpc-api/Cargo.toml b/schema-engine/json-rpc-api/Cargo.toml
new file mode 100644
index 000000000000..9d2a71c049f3
--- /dev/null
+++ b/schema-engine/json-rpc-api/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "json-rpc-api"
+version = "0.1.0"
+edition = "2024"
+description = "JSON-RPC API definitions for the Prisma Schema Engine"
+
+[dependencies]
+heck.workspace = true
+serde.workspace = true
+toml.workspace = true
+
+[target.'cfg(target_arch = "wasm32")'.dependencies]
+wasm-bindgen.workspace = true
+tsify-next.workspace = true
diff --git a/schema-engine/json-rpc-api/src/lib.rs b/schema-engine/json-rpc-api/src/lib.rs
new file mode 100644
index 000000000000..7db3da041f86
--- /dev/null
+++ b/schema-engine/json-rpc-api/src/lib.rs
@@ -0,0 +1,10 @@
+//! JSON-RPC API definitions for the Prisma Schema Engine.
+//!
+//! This crate defines the JSON-RPC API for the Prisma Schema Engine, including
+//! all method definitions, request parameters, and response types.
+
+/// API type definitions used by the methods.
+pub mod types;
+
+/// JSON-RPC API method names.
+pub mod method_names;
diff --git a/schema-engine/json-rpc-api/src/method_names.rs b/schema-engine/json-rpc-api/src/method_names.rs
new file mode 100644
index 000000000000..cbbd3b8fe290
--- /dev/null
+++ b/schema-engine/json-rpc-api/src/method_names.rs
@@ -0,0 +1,42 @@
+//! JSON-RPC API methods.
+
+pub const APPLY_MIGRATIONS: &str = "applyMigrations";
+pub const CREATE_DATABASE: &str = "createDatabase";
+pub const CREATE_MIGRATION: &str = "createMigration";
+pub const DB_EXECUTE: &str = "dbExecute";
+pub const DEBUG_PANIC: &str = "debugPanic";
+pub const DEV_DIAGNOSTIC: &str = "devDiagnostic";
+pub const DIAGNOSE_MIGRATION_HISTORY: &str = "diagnoseMigrationHistory";
+pub const DIFF: &str = "diff";
+pub const ENSURE_CONNECTION_VALIDITY: &str = "ensureConnectionValidity";
+pub const EVALUATE_DATA_LOSS: &str = "evaluateDataLoss";
+pub const GET_DATABASE_VERSION: &str = "getDatabaseVersion";
+pub const INTROSPECT: &str = "introspect";
+pub const INTROSPECT_SQL: &str = "introspectSql";
+pub const LIST_MIGRATION_DIRECTORIES: &str = "listMigrationDirectories";
+pub const MARK_MIGRATION_APPLIED: &str = "markMigrationApplied";
+pub const MARK_MIGRATION_ROLLED_BACK: &str = "markMigrationRolledBack";
+pub const RESET: &str = "reset";
+pub const SCHEMA_PUSH: &str = "schemaPush";
+
+/// Exhaustive list of the names of all JSON-RPC methods.
+pub const METHOD_NAMES: &[&str] = &[
+    APPLY_MIGRATIONS,
+    CREATE_DATABASE,
+    CREATE_MIGRATION,
+    DB_EXECUTE,
+    DEBUG_PANIC,
+    DEV_DIAGNOSTIC,
+    DIAGNOSE_MIGRATION_HISTORY,
+    DIFF,
+    ENSURE_CONNECTION_VALIDITY,
+    EVALUATE_DATA_LOSS,
+    GET_DATABASE_VERSION,
+    INTROSPECT,
+    INTROSPECT_SQL,
+    LIST_MIGRATION_DIRECTORIES,
+    MARK_MIGRATION_APPLIED,
+    MARK_MIGRATION_ROLLED_BACK,
+    RESET,
+    SCHEMA_PUSH,
+];
diff --git a/schema-engine/json-rpc-api/src/types.rs b/schema-engine/json-rpc-api/src/types.rs
new file mode 100644
index 000000000000..3ea9d414707e
--- /dev/null
+++ b/schema-engine/json-rpc-api/src/types.rs
@@ -0,0 +1,656 @@
+//! API type definitions used by the JSON-RPC methods.
+
+use serde::{Deserialize, Serialize};
+
+#[cfg(target_arch = "wasm32")]
+use tsify_next::Tsify;
+
+// ---- Common type definitions ----
+
+/// An object with a `url` field.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct UrlContainer {
+    /// The URL string.
+    pub url: String,
+}
+
+/// A container that holds the path and the content of a Prisma schema file.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SchemaContainer {
+    /// The content of the Prisma schema file.
+    pub content: String,
+
+    /// The file name of the Prisma schema file.
+    pub path: String,
+}
+
+/// A container that holds multiple Prisma schema files.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SchemasContainer {
+    /// List of schema files.
+    pub files: Vec<SchemaContainer>,
+}
+
+/// A list of Prisma schema files with a config directory.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct SchemasWithConfigDir {
+    /// A list of Prisma schema files.
+    pub files: Vec<SchemaContainer>,
+
+    /// An optional directory containing the config files such as SSL certificates.
+    pub config_dir: String,
+}
+
+/// The path to a migrations directory of the shape expected by Prisma Migrate. The
+/// migrations will be applied to a **shadow database**, and the resulting schema
+/// considered for diffing.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct PathContainer {
+    pub path: String,
+}
+
+/// The path to a live database taken as input. For flexibility, this can be Prisma schemas as strings, or only the
+/// connection string. See variants.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(tag = "tag")]
+pub enum DatasourceParam {
+    /// Prisma schema as input
+    Schema(SchemasContainer),
+
+    /// Connection string as input
+    ConnectionString(UrlContainer),
+}
+
+/// A supported source for a database schema to diff in the `diff` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(tag = "tag", rename_all = "camelCase")]
+pub enum DiffTarget {
+    /// An empty schema.
+    Empty,
+
+    /// The Prisma schema content. The _datasource url_ will be considered, and the
+    /// live database it points to introspected for its schema.
+    SchemaDatasource(SchemasWithConfigDir),
+
+    /// The Prisma schema content. The contents of the schema itself will be
+    /// considered. This source does not need any database connection.
+    SchemaDatamodel(SchemasContainer),
+
+    /// The url to a live database. Its schema will be considered.
+    ///
+    /// This will cause the schema engine to connect to the database and read from it.
+    /// It will not write.
+    Url(UrlContainer),
+
+    /// The path to a migrations directory of the shape expected by Prisma Migrate. The migrations
+    /// will be applied to a **shadow database**, and the resulting schema considered for diffing.
+    Migrations(PathContainer),
+}
+
+/// A diagnostic returned by `diagnoseMigrationHistory` when looking at the
+/// database migration history in relation to the migrations directory.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(tag = "tag")]
+pub enum HistoryDiagnostic {
+    /// Migrations directory is behind the database.
+    MigrationsDirectoryIsBehind,
+
+    /// Histories diverge.
+    HistoriesDiverge,
+
+    /// There are migrations in the migrations directory that have not been applied to
+    /// the database yet.
+    DatabaseIsBehind(DatabaseIsBehindFields),
+}
+
+/// Fields for the DatabaseIsBehind variant.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DatabaseIsBehindFields {}
+
+/// The location of the live database to connect to.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(tag = "tag", rename_all = "camelCase")]
+pub enum DbExecuteDatasourceType {
+    /// Prisma schema files and content to take the datasource URL from.
+    Schema(SchemasWithConfigDir),
+
+    /// The URL of the database to run the command on.
+    Url(UrlContainer),
+}
+
+/// A suggested action for the CLI `migrate dev` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(tag = "tag", rename_all = "camelCase")]
+pub enum DevAction {
+    /// Reset the database.
+    Reset(DevActionReset),
+
+    /// Proceed to the next step
+    CreateMigration,
+}
+
+/// Reset action fields.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DevActionReset {
+    /// Why do we need to reset?
+    pub reason: String,
+}
+
+// ---- JSON-RPC API types ----
+
+// Apply Migrations
+
+/// The input to the `applyMigrations` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct ApplyMigrationsInput {
+    /// The location of the migrations directory.
+    pub migrations_directory_path: String,
+}
+
+/// The output of the `applyMigrations` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct ApplyMigrationsOutput {
+    /// The names of the migrations that were just applied. Empty if no migration was applied.
+    pub applied_migration_names: Vec<String>,
+}
+
+// Create Database
+
+/// The type of params for the `createDatabase` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct CreateDatabaseParams {
+    /// The datasource parameter.
+    pub datasource: DatasourceParam,
+}
+
+/// The result for the `createDatabase` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct CreateDatabaseResult {
+    /// The name of the created database.
+    pub database_name: String,
+}
+
+// Create Migration
+
+/// The input to the `createMigration` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct CreateMigrationInput {
+    /// If true, always generate a migration, but do not apply.
+    pub draft: bool,
+
+    /// The user-given name for the migration. This will be used for the migration directory.
+    pub migration_name: String,
+
+    /// The filesystem path of the migrations directory to use.
+    pub migrations_directory_path: String,
+
+    /// The Prisma schema content to use as a target for the generated migration.
+    pub schema: SchemasContainer,
+}
+
+/// The output of the `createMigration` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct CreateMigrationOutput {
+    /// The name of the newly generated migration directory, if any.
+    ///
+    /// generatedMigrationName will be null if:
+    ///
+    /// 1. The migration we generate would be empty, **AND**
+    /// 2. the `draft` param was not true, because in that case the engine would still generate an empty
+    ///    migration script.
+    pub generated_migration_name: Option<String>,
+}
+
+// DB Execute
+
+/// The type of params accepted by dbExecute.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DbExecuteParams {
+    /// The location of the live database to connect to.
+    pub datasource_type: DbExecuteDatasourceType,
+
+    /// The input script.
+    pub script: String,
+}
+
+/// The type of results returned by dbExecute.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DbExecuteResult {}
+
+// Debug Panic
+
+/// Request for debug panic.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DebugPanicInput {}
+
+/// Response for debug panic.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DebugPanicOutput {}
+
+// Dev Diagnostic
+
+/// The request type for `devDiagnostic`.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DevDiagnosticInput {
+    /// The location of the migrations directory.
+    pub migrations_directory_path: String,
+}
+
+/// The response type for `devDiagnostic`.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct DevDiagnosticOutput {
+    /// The suggested course of action for the CLI.
+    pub action: DevAction,
+}
+
+// Diagnose Migration History
+
+/// The request params for the `diagnoseMigrationHistory` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DiagnoseMigrationHistoryInput {
+    /// The path to the root of the migrations directory.
+    pub migrations_directory_path: String,
+
+    /// Whether creating a shadow database is allowed.
+    pub opt_in_to_shadow_database: bool,
+}
+
+/// The result type for `diagnoseMigrationHistory` responses.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DiagnoseMigrationHistoryOutput {
+    /// The names of the migrations for which the checksum of the script in the
+    /// migration directory does not match the checksum of the applied migration
+    /// in the database.
+    pub edited_migration_names: Vec<String>,
+
+    /// The names of the migrations that are currently in a failed state in the migrations table.
+    pub failed_migration_names: Vec<String>,
+
+    /// Is the migrations table initialized/present in the database?
+    pub has_migrations_table: bool,
+
+    /// The current status of the migration history of the database
+    /// relative to migrations directory. `null` if they are in sync and up
+    /// to date.
+    pub history: Option<HistoryDiagnostic>,
+}
+
+// Diff
+
+/// The type of params for the `diff` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DiffParams {
+    /// The source of the schema to consider as a _starting point_.
+    pub from: DiffTarget,
+
+    /// The source of the schema to consider as a _destination_, or the desired
+    /// end-state.
+    pub to: DiffTarget,
+
+    /// The URL to a live database to use as a shadow database. The schema and data on
+    /// that database will be wiped during diffing.
+    ///
+    /// This is only necessary when one of `from` or `to` is referencing a migrations
+    /// directory as a source for the schema.
+    pub shadow_database_url: Option<String>,
+
+    /// By default, the response will contain a human-readable diff. If you want an
+    /// executable script, pass the `"script": true` param.
+    pub script: bool,
+
+    /// Whether the --exit-code param was passed.
+    ///
+    /// If this is set, the engine will return exitCode = 2 in the diffResult in case the diff is
+    /// non-empty. Other than this, it does not change the behaviour of the command.
+    pub exit_code: Option<bool>,
+}
+
+/// The result type for the `diff` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct DiffResult {
+    /// The exit code that the CLI should return.
+    pub exit_code: u32,
+}
+
+// List Migration Directories
+
+/// The input to the `listMigrationDirectories` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct ListMigrationDirectoriesInput {
+    /// The location of the migrations directory.
+    pub migrations_directory_path: String,
+}
+
+/// The output of the `listMigrationDirectories` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct ListMigrationDirectoriesOutput {
+    /// The names of the migrations in the migration directory. Empty if no migrations are found.
+    pub migrations: Vec<String>,
+}
+
+// Introspect SQL
+
+/// Params type for the introspectSql method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct IntrospectSqlParams {
+    /// The database URL.
+    pub url: String,
+    /// SQL queries to introspect.
+    pub queries: Vec<SqlQueryInput>,
+}
+
+/// Result type for the introspectSql method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct IntrospectSqlResult {
+    /// The introspected queries.
+    pub queries: Vec<SqlQueryOutput>,
+}
+
+/// Input for a single SQL query.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SqlQueryInput {
+    /// The name of the query.
+    pub name: String,
+    /// The source SQL.
+    pub source: String,
+}
+
+/// Output for a single SQL query.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct SqlQueryOutput {
+    /// The name of the query.
+    pub name: String,
+    /// The source SQL.
+    pub source: String,
+    /// Optional documentation.
+    pub documentation: Option<String>,
+    /// Query parameters.
+    pub parameters: Vec<SqlQueryParameterOutput>,
+    /// Query result columns.
+    pub result_columns: Vec<SqlQueryColumnOutput>,
+}
+
+/// Information about a SQL query parameter.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SqlQueryParameterOutput {
+    /// Parameter name.
+    pub name: String,
+    /// Parameter type.
+    pub typ: String,
+    /// Optional documentation.
+    pub documentation: Option<String>,
+    /// Whether the parameter is nullable.
+    pub nullable: bool,
+}
+
+/// Information about a SQL query result column.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SqlQueryColumnOutput {
+    /// Column name.
+    pub name: String,
+    /// Column type.
+    pub typ: String,
+    /// Whether the column is nullable.
+    pub nullable: bool,
+}
+
+// Introspect
+
+/// Introspect the database (db pull).
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct IntrospectParams {
+    /// Prisma schema files.
+    pub schema: SchemasContainer,
+    /// Base directory path.
+    pub base_directory_path: String,
+    /// Force flag.
+    pub force: bool,
+    /// Composite type depth.
+    pub composite_type_depth: isize,
+    /// Optional namespaces.
+    pub namespaces: Option<Vec<String>>,
+}
+
+/// Result type for the introspect method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct IntrospectResult {
+    /// The introspected schema.
+    pub schema: SchemasContainer,
+    /// Optional views.
+    pub views: Option<Vec<IntrospectionView>>,
+    /// Optional warnings.
+    pub warnings: Option<String>,
+}
+
+/// Information about a database view.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct IntrospectionView {
+    /// The view definition.
+    pub definition: String,
+    /// The view name.
+    pub name: String,
+    /// The schema name.
+    pub schema: String,
+}
+
+// Get Database Version
+
+/// Get the database version for error reporting.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct GetDatabaseVersionInput {
+    /// The datasource parameter.
+    pub datasource: DatasourceParam,
+}
+
+/// Output for the getDatabaseVersion method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct GetDatabaseVersionOutput {
+    /// The database version.
+    pub version: String,
+}
+
+// Evaluate Data Loss
+
+/// Development command for migrations. Evaluate the data loss induced by the next
+/// migration the engine would generate on the main database.
+///
+/// At this stage, the engine does not create or mutate anything in the database
+/// nor in the migrations directory.
+///
+/// This is part of the `migrate dev` flow.
+///
+/// **Note**: the engine currently assumes the main database schema is up-to-date
+/// with the migration history.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct EvaluateDataLossInput {
+    /// The location of the migrations directory.
+    pub migrations_directory_path: String,
+    /// The Prisma schema files to migrate to.
+    pub schema: SchemasContainer,
+}
+
+/// The output of the `evaluateDataLoss` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct EvaluateDataLossOutput {
+    /// The number of migration steps that would be generated. If this is 0, we
+    /// wouldn't generate a new migration, unless the `draft` option is
+    /// passed.
+    pub migration_steps: u32,
+    /// Steps that cannot be executed on the local database in the
+    /// migration that would be generated.
+    pub unexecutable_steps: Vec<MigrationFeedback>,
+    /// Destructive change warnings for the local database. These are the
+    /// warnings *for the migration that would be generated*. This does not
+    /// include other potentially yet unapplied migrations.
+    pub warnings: Vec<MigrationFeedback>,
+}
+
+/// A data loss warning or an unexecutable migration error, associated with the step that triggered it.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct MigrationFeedback {
+    /// The human-readable message.
+    pub message: String,
+    /// The index of the step this pertains to.
+    pub step_index: u32,
+}
+
+// Ensure Connection Validity
+
+/// Make sure the schema engine can connect to the database from the Prisma schema.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct EnsureConnectionValidityParams {
+    /// The datasource parameter.
+    pub datasource: DatasourceParam,
+}
+
+/// Result type for the ensureConnectionValidity method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct EnsureConnectionValidityResult {}
+
+// Mark Migration Applied
+
+/// Mark a migration as applied in the migrations table.
+///
+/// There are three possible outcomes:
+///
+/// - The migration is already in the table, but in a failed state. In this case, we will mark it
+///   as rolled back, then create a new entry.
+/// - The migration is not in the table. We will create a new entry in the migrations table. The
+///   `started_at` and `finished_at` will be the same.
+/// - The migration is already applied. In this case, we return a user-facing error.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct MarkMigrationAppliedInput {
+    /// The name of the migration to mark applied.
+    pub migration_name: String,
+
+    /// The path to the root of the migrations directory.
+    pub migrations_directory_path: String,
+}
+
+/// The output of the `markMigrationApplied` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct MarkMigrationAppliedOutput {}
+
+// Mark Migration Rolled Back
+
+/// Mark an existing failed migration as rolled back in the migrations table. It
+/// will still be there, but ignored for all purposes except as an audit trail.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct MarkMigrationRolledBackInput {
+    /// The name of the migration to mark rolled back.
+    pub migration_name: String,
+}
+
+/// The output of the `markMigrationRolledBack` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct MarkMigrationRolledBackOutput {}
+
+// Reset
+
+/// The input to the `reset` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct ResetInput {}
+
+/// The output of the `reset` command.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct ResetOutput {}
+
+// Schema Push
+
+/// Request params for the `schemaPush` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+pub struct SchemaPushInput {
+    /// Push the schema ignoring destructive change warnings.
+    pub force: bool,
+
+    /// The Prisma schema files.
+    pub schema: SchemasContainer,
+}
+
+/// Response result for the `schemaPush` method.
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(target_arch = "wasm32", derive(Tsify))]
+#[serde(rename_all = "camelCase")]
+pub struct SchemaPushOutput {
+    /// How many migration steps were executed.
+    pub executed_steps: u32,
+
+    /// Steps that cannot be executed in the current state of the database.
+    pub unexecutable: Vec<String>,
+
+    /// Destructive change warnings.
+    pub warnings: Vec<String>,
+}
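+
+// --- Illustrative sketch only, not part of the JSON-RPC API contract ---
+//
+// A minimal example of how these types map to the camelCase wire format via
+// `#[serde(rename_all = "camelCase")]`. It assumes `serde_json` is available
+// as a dev-dependency; the module name and payload values are hypothetical
+// and only demonstrate the serde round-trip, not engine behaviour.
+#[cfg(test)]
+mod wire_format_sketch {
+    use super::*;
+
+    #[test]
+    fn camel_case_round_trip() {
+        // Snake_case Rust fields serialize to camelCase JSON keys.
+        let input = MarkMigrationAppliedInput {
+            migration_name: "20240101000000_init".to_string(),
+            migrations_directory_path: "./prisma/migrations".to_string(),
+        };
+        let json = serde_json::to_value(&input).unwrap();
+        assert_eq!(json["migrationName"], "20240101000000_init");
+        assert_eq!(json["migrationsDirectoryPath"], "./prisma/migrations");
+
+        // And camelCase JSON responses deserialize back into the Rust types.
+        let output: SchemaPushOutput = serde_json::from_str(
+            r#"{ "executedSteps": 1, "unexecutable": [], "warnings": [] }"#,
+        )
+        .unwrap();
+        assert_eq!(output.executed_steps, 1);
+        assert!(output.unexecutable.is_empty() && output.warnings.is_empty());
+    }
+}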