From 503658c9c83493e08859def813ccdf9110e4a5d6 Mon Sep 17 00:00:00 2001 From: Yogesh01000100 Date: Tue, 20 Aug 2024 21:08:54 +0000 Subject: [PATCH] feat(satp-hermes): add crash recovery & rollback protocol 1. Implemented recovery & rollback using RPC-based message handlers. 2. Added rollback strategies for all SATP stages. 3. Integrated database log management for recovery and rollback. 4. Added cron jobs for scheduled crash detection and recovery initiation. Co-authored-by: Rafael Belchior Co-authored-by: Carlos Amaro Signed-off-by: Yogesh01000100 chore(satp-hermes): improve DB management Signed-off-by: Rafael Belchior chore(satp-hermes): crash recovery architecture Signed-off-by: Rafael Belchior fix(recovery): enhance crash recovery and rollback implementation Signed-off-by: Yogesh01000100 refactor(recovery): consolidate logic and improve SATP message handling Signed-off-by: Yogesh01000100 feat(recovery): add rollback implementations Signed-off-by: Yogesh01000100 fix: correct return types and inits Signed-off-by: Yogesh01000100 fix: add unit tests and resolve rollbackstate Signed-off-by: Yogesh01000100 feat: add function processing logs from g2 Signed-off-by: Yogesh01000100 feat: add cron schedule for periodic crash checks Signed-off-by: Yogesh01000100 fix: resolve rollback condition and add tests Signed-off-by: Yogesh01000100 feat: add orchestrator communication layer using connect-RPC Signed-off-by: Yogesh01000100 feat: add rollback protocol rpc Signed-off-by: Yogesh01000100 fix: handle server log synchronization Signed-off-by: Yogesh01000100 fix: resolve gol errors, add unit tests Signed-off-by: Yogesh01000100 fix: handle server-side rollback Signed-off-by: Yogesh01000100 fix: resolve networkId in rollback strategies Signed-off-by: Yogesh01000100 --- packages/cactus-plugin-satp-hermes/README.md | 42 +- .../cactus-plugin-satp-hermes/package.json | 2 + .../src/knex/knexfile-remote.ts | 8 +- .../src/knex/knexfile.ts | 6 +- .../20220331132128_create_logs_table.ts | 2 +- .../proto/cacti/satp/v02/crash_recovery.proto | 8 +- .../core/crash-management/client-service.ts | 141 ++++ .../core/crash-management/crash-handler.ts | 146 ++++ .../rollback/rollback-strategy-factory.ts | 124 +++ .../rollback/stage0-rollback-strategy.ts | 163 ++++ .../rollback/stage1-rollback-strategy.ts | 106 +++ .../rollback/stage2-rollback-strategy.ts | 161 ++++ .../rollback/stage3-rollback-strategy.ts | 177 +++++ .../core/crash-management/server-service.ts | 221 ++++++ .../src/main/typescript/core/satp-session.ts | 21 + .../src/main/typescript/core/types.ts | 11 +- .../cacti/satp/v02/crash_recovery_connect.ts | 34 +- .../proto/cacti/satp/v02/crash_recovery_pb.ts | 423 +++++++++- .../src/main/typescript/gol/crash-manager.ts | 726 ++++++++++++++++++ .../typescript/gol/gateway-orchestrator.ts | 19 + .../src/main/typescript/gol/satp-manager.ts | 2 +- .../src/main/typescript/logging.ts | 6 +- .../typescript/plugin-satp-hermes-gateway.ts | 89 ++- .../repository/knex-local-log-repository.ts | 2 +- .../main/typescript/types/satp-protocol.ts | 5 +- .../recovery/recovery-stage-1.test.ts | 381 +++++++++ .../recovery/recovery-stage-2.test.ts | 392 ++++++++++ .../recovery/recovery-stage-3.test.ts | 436 +++++++++++ .../rollback/rollback-stage-0.test.ts | 426 ++++++++++ .../rollback/rollback-stage-1.test.ts | 354 +++++++++ .../rollback/rollback-stage-2.test.ts | 427 ++++++++++ .../rollback/rollback-stage-3.test.ts | 470 ++++++++++++ .../unit/crash-management/cron-job.test.ts | 269 +++++++ .../crash-management/rollback-factory.test.ts 
| 239 ++++++ .../unit/crash-management/scenarios.test.ts | 490 ++++++++++++ .../src/test/typescript/unit/services.test.ts | 2 +- .../satp-runner/satp-gateway-runner.ts | 16 + yarn.lock | 56 +- 38 files changed, 6566 insertions(+), 37 deletions(-) create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/client-service.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/crash-handler.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/rollback-strategy-factory.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage0-rollback-strategy.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage1-rollback-strategy.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage2-rollback-strategy.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage3-rollback-strategy.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/server-service.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/main/typescript/gol/crash-manager.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-1.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-2.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-3.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-0.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-1.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-2.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-3.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/cron-job.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/rollback-factory.test.ts create mode 100644 packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/scenarios.test.ts diff --git a/packages/cactus-plugin-satp-hermes/README.md b/packages/cactus-plugin-satp-hermes/README.md index 5513b7ee0c..bf604a42bf 100644 --- a/packages/cactus-plugin-satp-hermes/README.md +++ b/packages/cactus-plugin-satp-hermes/README.md @@ -57,6 +57,17 @@ The sequence diagram of SATP is pictured below. ![satp-sequence-diagram](https://i.imgur.com/SOdXFEt.png) +### Crash Recovery Integration +The crash recovery protocol ensures session consistency across all stages of SATP. Each session's state, logs, hashes, timestamps, and signatures are stored and recovered using the following mechanisms: + +1. **Session Logs**: A persistent log storage mechanism ensures crash-resilient state recovery. +2. **Stage Recovery**: Recovers interrupted sessions by validating logs, hashes, timestamps, and signatures to maintain protocol integrity. +1. **Consistency Checks**: Ensures all messages and actions are consistent across both gateways and the connected ledgers. +2. 
**Rollback Operations**: In the event of a timeout or irrecoverable failure, rollback messages ensure that the state changes of the current stage are reverted. +3. **Logging & Proofs**: The SQLite3 database is leveraged for state consistency and proof accountability across gateways. + +Refer to the [Crash Recovery Sequence](https://datatracker.ietf.org/doc/html/draft-belchior-satp-gateway-recovery) for more details. + ### Application-to-Gateway API (API Type 1) We @@ -76,17 +87,28 @@ There are Client and Server Endpoints for each type of message detailed in the S - CommitFinalV1Response - TransferCompleteV1Request - ClientV1Request +### Crash Recovery Endpoints +There are Client and Server gRPC Endpoints for the recovery and rollback messages: -There are also defined the endpoints for the crash recovery procedure (there is still missing the endpoint to receive the Rollback mesage): - - RecoverV1Message - - RecoverUpdateV1Message - - RecoverUpdateAckV1Message - - RecoverSuccessV1Message - - RollbackV1Message +- **Recovery Messages:** + - `RecoverV2Message` + - `RecoverV2SuccessMessage` + - `RecoverUpdateMessage` +- **Rollback Messages:** + - `RollbackV2Message` + - `RollbackAckMessage` ## Use case Alice and Bob, in blockchains A and B, respectively, want to make a transfer of an asset from one to the other. Gateway A represents the gateway connected to Alice's blockchain. Gateway B represents the gateway connected to Bob's blockchain. Alice and Bob will run SATP, which will execute the transfer of the asset from blockchain A to blockchain B. The above endpoints will be called in sequence. Notice that the asset will first be locked on blockchain A and a proof is sent to the server-side. Afterward, the asset on the original blockchain is extinguished, followed by its regeneration on blockchain B. +### Role of Crash Recovery in SATP +In SATP, crash recovery ensures that asset transfers remain consistent and fault-tolerant across distributed ledgers. Key features include: +- **Session Recovery**: Gateways synchronize state using recovery messages, ensuring continuity after failures. +- **Rollback**: For irrecoverable errors, rollback procedures ensure safe reversion to previous states. +- **Fault Resilience**: Enables recovery from crashes while maintaining the integrity of ongoing transfers. + +These features enhance reliability in scenarios where network or gateway disruptions occur during asset transfers. + ## Running the tests [A test of the entire protocol with manual calls to the methods, i.e.
without ledger connectors and Open API.](https://github.com/hyperledger/cactus/blob/2e94ef8d3b34449c7b4d48e37d81245851477a3e/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/satp.test.ts) @@ -109,6 +131,14 @@ Alice and Bob, in blockchains A and B, respectively, want to make a transfer of [A test with a backup gateway resuming the protocol after the client gateway crashed.](https://github.com/hyperledger/cactus/tree/main/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/backup-gateway-after-client-crash.test.ts) + +### Crash Recovery Tests +- [Stage 1 Recovery Test](src/test/typescript/integration/recovery/recovery-stage-1.test.ts) +- [Stage 2 Recovery Test](src/test/typescript/integration/recovery/recovery-stage-2.test.ts) +- [Stage 3 Recovery Test](src/test/typescript/integration/recovery/recovery-stage-3.test.ts) +- [Rollback Test](src/test/typescript/integration/rollback/rollback.test.ts) +- [Rollback Timeout Test](src/test/typescript/integration/rollback/rollback-timeout.test.ts) + For developers that want to test separate steps/phases of the SATP protocol, please refer to [these](https://github.com/hyperledger/cactus/blob/2e94ef8d3b34449c7b4d48e37d81245851477a3e/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/) test files (client and server side along with the recovery procedure). ## Usage diff --git a/packages/cactus-plugin-satp-hermes/package.json b/packages/cactus-plugin-satp-hermes/package.json index b12e8e4143..a47cf53702 100644 --- a/packages/cactus-plugin-satp-hermes/package.json +++ b/packages/cactus-plugin-satp-hermes/package.json @@ -136,6 +136,7 @@ "jsonc": "2.0.0", "knex": "2.4.0", "kubo-rpc-client": "3.0.1", + "node-schedule": "2.1.1", "npm-run-all": "4.1.5", "openzeppelin-solidity": "3.4.2", "pg": "8.13.1", @@ -163,6 +164,7 @@ "@types/fs-extra": "11.0.4", "@types/google-protobuf": "3.15.12", "@types/node": "18.18.2", + "@types/node-schedule": "2.1.7", "@types/pg": "8.11.10", "@types/swagger-ui-express": "4.1.6", "@types/tape": "4.13.4", diff --git a/packages/cactus-plugin-satp-hermes/src/knex/knexfile-remote.ts b/packages/cactus-plugin-satp-hermes/src/knex/knexfile-remote.ts index 1a0bd3709a..07e4bee0c5 100644 --- a/packages/cactus-plugin-satp-hermes/src/knex/knexfile-remote.ts +++ b/packages/cactus-plugin-satp-hermes/src/knex/knexfile-remote.ts @@ -6,11 +6,11 @@ import { Knex } from "knex"; const envPath = process.env.ENV_PATH; dotenv.config({ path: envPath }); -const config: { [key: string]: Knex.Config } = { - development: { +export const knexRemoteInstance: { [key: string]: Knex.Config } = { + default: { client: "sqlite3", connection: { - filename: path.resolve(__dirname, ".dev.remote-" + uuidv4() + ".sqlite3"), + filename: path.resolve(__dirname, `.dev.remote-${uuidv4()}.sqlite3`), }, migrations: { directory: path.resolve(__dirname, "migrations"), @@ -31,5 +31,3 @@ const config: { [key: string]: Knex.Config } = { }, }, }; - -export default config; diff --git a/packages/cactus-plugin-satp-hermes/src/knex/knexfile.ts b/packages/cactus-plugin-satp-hermes/src/knex/knexfile.ts index 9c7535ea11..d7e42cab1f 100644 --- a/packages/cactus-plugin-satp-hermes/src/knex/knexfile.ts +++ b/packages/cactus-plugin-satp-hermes/src/knex/knexfile.ts @@ -6,8 +6,8 @@ import { Knex } from "knex"; const envPath = process.env.ENV_PATH; dotenv.config({ path: envPath }); -const config: { [key: string]: Knex.Config } = { - development: { +export const knexLocalInstance: { [key: string]: Knex.Config } = { + default: { client: "sqlite3", connection: 
{ filename: path.resolve(__dirname, `.dev.local-${uuidv4()}.sqlite3`), @@ -34,5 +34,3 @@ const config: { [key: string]: Knex.Config } = { }, }, }; - -export default config; diff --git a/packages/cactus-plugin-satp-hermes/src/knex/migrations/20220331132128_create_logs_table.ts b/packages/cactus-plugin-satp-hermes/src/knex/migrations/20220331132128_create_logs_table.ts index 6227cc4aad..336b358e68 100644 --- a/packages/cactus-plugin-satp-hermes/src/knex/migrations/20220331132128_create_logs_table.ts +++ b/packages/cactus-plugin-satp-hermes/src/knex/migrations/20220331132128_create_logs_table.ts @@ -2,7 +2,7 @@ import { Knex } from "knex"; export function up(knex: Knex): Knex.SchemaBuilder { return knex.schema.createTable("logs", (table) => { - table.string("sessionID").notNullable(); + table.string("sessionId").notNullable(); table.string("type").notNullable(); table.string("key").notNullable().primary(); table.string("operation").notNullable(); diff --git a/packages/cactus-plugin-satp-hermes/src/main/proto/cacti/satp/v02/crash_recovery.proto b/packages/cactus-plugin-satp-hermes/src/main/proto/cacti/satp/v02/crash_recovery.proto index 6ba42bc554..39d881338b 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/proto/cacti/satp/v02/crash_recovery.proto +++ b/packages/cactus-plugin-satp-hermes/src/main/proto/cacti/satp/v02/crash_recovery.proto @@ -9,7 +9,7 @@ service CrashRecovery { // step RPCs rpc RecoverV2Message(RecoverMessage) returns (RecoverUpdateMessage); - rpc RecoverV2SuccessMessage(RecoverSuccessMessage) returns (google.protobuf.Empty); + rpc RecoverV2SuccessMessage(RecoverSuccessMessage) returns (RecoverSuccessMessageResponse); rpc RollbackV2Message(RollbackMessage) returns (RollbackAckMessage); } @@ -41,6 +41,12 @@ message RecoverSuccessMessage { string sender_signature = 6; } +message RecoverSuccessMessageResponse { + string session_id = 1; + bool received = 2; + string sender_signature = 3; +} + message RollbackMessage { string session_id = 1; string message_type = 2; diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/client-service.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/client-service.ts new file mode 100644 index 0000000000..15adf8622a --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/client-service.ts @@ -0,0 +1,141 @@ +import { + RecoverMessage, + RecoverMessageSchema, + RecoverSuccessMessage, + RecoverSuccessMessageSchema, + RollbackMessage, + RollbackMessageSchema, + RollbackState, +} from "../../../typescript/generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { JsObjectSigner, Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../satp-session"; +import { create } from "@bufbuild/protobuf"; +import { SATPLogger } from "../../logging"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { bufArray2HexStr, sign } from "../../gateway-utils"; + +export class CrashRecoveryClientService { + constructor( + private readonly dbLogger: SATPLogger, + private readonly log: Logger, + private readonly signer: JsObjectSigner, + ) { + this.log = log; + this.log.trace(`Initialized ${CrashRecoveryClientService.name}`); + } + + public async createRecoverMessage( + session: SATPSession, + ): Promise { + const fnTag = `${CrashRecoveryClientService.name}#createRecoverMessage`; + this.log.debug( + `${fnTag} - Creating RecoverMessage for sessionId: ${session.getSessionId()}`, + ); + + const sessionData = 
session.getClientSessionData(); + + const recoverMessage = create(RecoverMessageSchema, { + sessionId: session.getSessionId(), + messageType: "urn:ietf:SATP-2pc:msgtype:recover-msg", + satpPhase: "", + sequenceNumber: Number(sessionData.lastSequenceNumber), + isBackup: false, + newIdentityPublicKey: "", + lastEntryTimestamp: BigInt(sessionData.lastMessageReceivedTimestamp), + senderSignature: "", + }); + + const signature = bufArray2HexStr( + sign(this.signer, safeStableStringify(recoverMessage)), + ); + + recoverMessage.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: recoverMessage.sessionId, + type: "urn:ietf:SATP-2pc:msgtype:recover-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + this.log.debug(`${fnTag} - RecoverMessage created:`, recoverMessage); + + return recoverMessage; + } + + public async createRecoverSuccessMessage( + session: SATPSession, + ): Promise { + const fnTag = `${CrashRecoveryClientService.name}#createRecoverSuccessMessage`; + this.log.debug( + `${fnTag} - Creating RecoverSuccessMessage for sessionId: ${session.getSessionId()}`, + ); + const sessionData = session.getClientSessionData(); + const recoverSuccessMessage = create(RecoverSuccessMessageSchema, { + sessionId: session.getSessionId(), + messageType: "urn:ietf:SATP-2pc:msgtype:recover-success-msg", + hashRecoverUpdateMessage: "", + success: true, + entriesChanged: [], + senderSignature: "", + }); + + const signature = bufArray2HexStr( + sign(this.signer, safeStableStringify(recoverSuccessMessage)), + ); + + recoverSuccessMessage.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: recoverSuccessMessage.sessionId, + type: "urn:ietf:SATP-2pc:msgtype:recover-success-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + this.log.debug( + `${fnTag} - RecoverSuccessMessage created:`, + recoverSuccessMessage, + ); + + return recoverSuccessMessage; + } + + public async createRollbackMessage( + session: SATPSession, + rollbackState: RollbackState, + ): Promise { + const fnTag = `${CrashRecoveryClientService.name}#createRollbackMessage`; + this.log.debug( + `${fnTag} - Creating RollbackMessage for sessionId: ${session.getSessionId()}`, + ); + const sessionData = session.getClientSessionData(); + const rollbackMessage = create(RollbackMessageSchema, { + sessionId: session.getSessionId(), + messageType: "urn:ietf:SATP-2pc:msgtype:rollback-msg", + success: rollbackState.status === "COMPLETED", + actionsPerformed: rollbackState.rollbackLogEntries.map( + (entry) => entry.action, + ), + proofs: [], + senderSignature: "", + }); + + const signature = bufArray2HexStr( + sign(this.signer, safeStableStringify(rollbackMessage)), + ); + + rollbackMessage.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: rollbackMessage.sessionId, + type: "urn:ietf:SATP-2pc:msgtype:rollback-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + this.log.debug(`${fnTag} - RollbackMessage created:`, rollbackMessage); + + return rollbackMessage; + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/crash-handler.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/crash-handler.ts new file mode 100644 index 0000000000..0f488ada4f --- /dev/null 
+++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/crash-handler.ts @@ -0,0 +1,146 @@ +import { ConnectRouter } from "@connectrpc/connect"; +import { Logger } from "@hyperledger/cactus-common"; +import { + CrashRecovery, + RecoverSuccessMessageResponse, +} from "../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { CrashRecoveryServerService } from "./server-service"; +import { CrashRecoveryClientService } from "./client-service"; +import { SATPSession } from "../satp-session"; +import { + RecoverMessage, + RecoverUpdateMessage, + RecoverSuccessMessage, + RollbackMessage, + RollbackAckMessage, + RollbackState, +} from "../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPHandler, SATPHandlerType } from "../../types/satp-protocol"; + +export class CrashRecoveryHandler implements SATPHandler { + private readonly log: Logger; + + constructor( + private readonly serverService: CrashRecoveryServerService, + private readonly clientService: CrashRecoveryClientService, + log: Logger, + ) { + this.log = log; + this.log.trace(`Initialized ${CrashRecoveryHandler.name}`); + } + + public getHandlerIdentifier(): SATPHandlerType { + return SATPHandlerType.CRASH; + } + + public getHandlerSessions(): string[] { + return []; + } + + public getStage(): string { + return "crash"; + } + + // Server-side + + private async recoverV2MessageImplementation( + req: RecoverMessage, + ): Promise { + const fnTag = `${CrashRecoveryHandler.name}#recoverV2MessageImplementation`; + this.log.debug(`${fnTag} - Handling RecoverMessage: ${req}`); + try { + return await this.serverService.handleRecover(req); + } catch (error) { + this.log.error(`${fnTag} - Error:`, error); + throw error; + } + } + + private async recoverV2SuccessMessageImplementation( + req: RecoverSuccessMessage, + ): Promise { + const fnTag = `${CrashRecoveryHandler.name}#recoverV2SuccessMessageImplementation`; + this.log.debug(`${fnTag} - Handling RecoverSuccessMessage:${req}`); + try { + return await this.serverService.handleRecoverSuccess(req); + } catch (error) { + this.log.error(`${fnTag} - Error:`, error); + throw error; + } + } + + private async rollbackV2MessageImplementation( + req: RollbackMessage, + ): Promise { + const fnTag = `${CrashRecoveryHandler.name}#rollbackV2MessageImplementation`; + this.log.debug(`${fnTag} - Handling RollbackMessage: ${req}`); + try { + return await this.serverService.handleRollback(req); + } catch (error) { + this.log.error(`${fnTag} - Error:`, error); + throw error; + } + } + + public setupRouter(router: ConnectRouter): void { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const that = this; + router.service(CrashRecovery, { + async recoverV2Message(req) { + return await that.recoverV2MessageImplementation(req); + }, + async recoverV2SuccessMessage(req) { + return await that.recoverV2SuccessMessageImplementation(req); + }, + async rollbackV2Message(req) { + return await that.rollbackV2MessageImplementation(req); + }, + }); + + this.log.info("Router setup completed for CrashRecoveryHandler"); + } + + // Client-side + + public async createRecoverMessage( + session: SATPSession, + ): Promise { + const fnTag = `${this.constructor.name}#createRecoverMessage`; + try { + return this.clientService.createRecoverMessage(session); + } catch (error) { + this.log.error(`${fnTag} - Failed to create RecoverMessage: ${error}`); + throw new Error(`Error in createRecoverMessage: ${error}`); + } + } + + public async createRecoverSuccessMessage( + session: 
SATPSession, + ): Promise { + const fnTag = `${this.constructor.name}#createRecoverSuccessMessage`; + try { + return await this.clientService.createRecoverSuccessMessage(session); + } catch (error) { + this.log.error( + `${fnTag} - Failed to create RecoverSuccessMessage: ${error}`, + ); + throw new Error(`Error in createRecoverSuccessMessage: ${error}`); + } + } + + public async createRollbackMessage( + session: SATPSession, + rollbackState: RollbackState, + ): Promise { + const fnTag = `${this.constructor.name}#createRollbackMessage`; + try { + return await this.clientService.createRollbackMessage( + session, + rollbackState, + ); + } catch (error) { + this.log.error(`${fnTag} - Failed to create RollbackMessage: ${error}`); + throw new Error(`Error in createRollbackMessage: ${error}`); + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/rollback-strategy-factory.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/rollback-strategy-factory.ts new file mode 100644 index 0000000000..7a40a35d81 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/rollback-strategy-factory.ts @@ -0,0 +1,124 @@ +import { Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../../satp-session"; +import { Stage0RollbackStrategy } from "./stage0-rollback-strategy"; +import { Stage1RollbackStrategy } from "./stage1-rollback-strategy"; +import { Stage2RollbackStrategy } from "./stage2-rollback-strategy"; +import { Stage3RollbackStrategy } from "./stage3-rollback-strategy"; +import { SATPBridgesManager } from "../../../gol/satp-bridges-manager"; +import { RollbackState } from "../../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { + Stage0Hashes, + Stage1Hashes, + Stage2Hashes, + Stage3Hashes, +} from "../../../generated/proto/cacti/satp/v02/common/session_pb"; +import { SATPLogger } from "../../../logging"; + +export interface RollbackStrategy { + execute(session: SATPSession): Promise; + cleanup(session: SATPSession, state: RollbackState): Promise; +} + +export class RollbackStrategyFactory { + private log: Logger; + private bridgesManager: SATPBridgesManager; + private dbLogger: SATPLogger; + + constructor( + bridgesManager: SATPBridgesManager, + log: Logger, + dbLogger: SATPLogger, + ) { + this.log = log; + this.bridgesManager = bridgesManager; + this.dbLogger = dbLogger; + } + + private isStage0Complete(hashes: Stage0Hashes): boolean { + return !!( + hashes?.newSessionRequestMessageHash && + hashes.newSessionResponseMessageHash && + hashes.preSatpTransferRequestMessageHash && + hashes.preSatpTransferResponseMessageHash + ); + } + + private isStage1Complete(hashes: Stage1Hashes): boolean { + return !!( + hashes?.transferProposalRequestMessageHash && + hashes.transferProposalReceiptMessageHash && + hashes.transferProposalRejectMessageHash && + hashes.transferCommenceRequestMessageHash && + hashes.transferCommenceResponseMessageHash + ); + } + + private isStage2Complete(hashes: Stage2Hashes): boolean { + return !!( + hashes?.lockAssertionRequestMessageHash && + hashes.lockAssertionReceiptMessageHash + ); + } + + private isStage3Complete(hashes: Stage3Hashes): boolean { + return !!( + hashes?.commitPreparationRequestMessageHash && + hashes.commitReadyResponseMessageHash && + hashes.commitFinalAssertionRequestMessageHash && + hashes.commitFinalAcknowledgementReceiptResponseMessageHash && + hashes.transferCompleteMessageHash && + 
hashes.transferCompleteResponseMessageHash + ); + } + + createStrategy(session: SATPSession): RollbackStrategy { + const fnTag = "RollbackStrategyFactory#createStrategy"; + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData || !sessionData.hashes) { + throw new Error("Session data or hashes are undefined."); + } + + const hashes = sessionData.hashes; + + const stage0Complete = + hashes.stage0 && this.isStage0Complete(hashes.stage0); + const stage1Complete = + stage0Complete && hashes.stage1 && this.isStage1Complete(hashes.stage1); + const stage2Complete = + stage1Complete && hashes.stage2 && this.isStage2Complete(hashes.stage2); + const stage3Complete = + stage2Complete && hashes.stage3 && this.isStage3Complete(hashes.stage3); + + if (!stage0Complete) { + this.log.debug(`${fnTag} Creating Stage0RollbackStrategy`); + return new Stage0RollbackStrategy( + this.bridgesManager, + this.log, + this.dbLogger, + ); + } else if (!stage1Complete) { + this.log.debug(`${fnTag} Creating Stage1RollbackStrategy`); + return new Stage1RollbackStrategy(this.log, this.dbLogger); + } else if (!stage2Complete) { + this.log.debug(`${fnTag} Creating Stage2RollbackStrategy`); + return new Stage2RollbackStrategy( + this.bridgesManager, + this.log, + this.dbLogger, + ); + } else if (!stage3Complete) { + this.log.debug(`${fnTag} Creating Stage3RollbackStrategy`); + return new Stage3RollbackStrategy( + this.bridgesManager, + this.log, + this.dbLogger, + ); + } else { + this.log.debug(`${fnTag} All stages completed; no rollback needed`); + throw new Error("No rollback needed as all stages are complete."); + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage0-rollback-strategy.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage0-rollback-strategy.ts new file mode 100644 index 0000000000..9bdba7a31e --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage0-rollback-strategy.ts @@ -0,0 +1,163 @@ +import { Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../../satp-session"; +import { RollbackStrategy } from "./rollback-strategy-factory"; +import { + RollbackLogEntrySchema, + RollbackState, + RollbackStateSchema, +} from "../../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { create } from "@bufbuild/protobuf"; +import { SATPBridgesManager } from "../../../gol/satp-bridges-manager"; +import { SATPLogger } from "../../../logging"; + +export class Stage0RollbackStrategy implements RollbackStrategy { + private log: Logger; + private dbLogger: SATPLogger; + private bridgeManager: SATPBridgesManager; + + constructor( + bridgeManager: SATPBridgesManager, + log: Logger, + dbLogger: SATPLogger, + ) { + this.log = log; + this.dbLogger = dbLogger; + this.bridgeManager = bridgeManager; + } + + async execute(session: SATPSession): Promise { + const fnTag = "Stage0RollbackStrategy#execute"; + this.log.info(`${fnTag} Executing rollback for Stage 0`); + + if (!session) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + const sessionData = session.hasClientSessionData() + ? 
session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + const isClient = session.hasClientSessionData(); + const network = isClient + ? sessionData.senderGatewayNetworkId + : sessionData.recipientGatewayNetworkId; + + if (isClient && !network) { + throw new Error( + `${fnTag}: Unable to determine network from session data.`, + ); + } + + this.log.info(`${fnTag} network: ${network}`); + + const bridge = this.bridgeManager.getBridge(network); + if (!bridge) { + throw new Error(`${fnTag}: No bridge found for network: ${network}`); + } + + const rollbackState = create(RollbackStateSchema, { + sessionId: session.getSessionId(), + currentStage: "Stage0", + stepsRemaining: Number(), + rollbackLogEntries: [], + estimatedTimeToCompletion: "", + status: "IN_PROGRESS", + details: "", + }); + + try { + if (isClient) { + // Client-side: + const assetId = sessionData.senderAsset?.tokenId; + + if (!assetId) { + throw new Error(`${fnTag}: Asset ID is undefined`); + } + + this.log.info(`${fnTag} Unwrapping Asset ID: ${assetId}`); + await bridge.unwrapAsset(assetId); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage0", + timestamp: new Date().toISOString(), + action: "UNWRAP_ASSET", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "COMPLETED"; + } else { + // Server-side: + const assetId = sessionData.receiverAsset?.tokenId; + + if (!assetId) { + throw new Error(`${fnTag}: Asset ID is undefined`); + } + + this.log.info(`${fnTag} Unwrapping Asset ID: ${assetId}`); + await bridge.unwrapAsset(assetId); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage0", + timestamp: new Date().toISOString(), + action: "UNWRAP_ASSET", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "COMPLETED"; + } + + this.log.info(`${fnTag} Rollback of Stage 0 completed successfully`); + return rollbackState; + } catch (error) { + this.log.error(`${fnTag} Error during rollback of Stage 0: ${error}`); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage0", + timestamp: new Date().toISOString(), + action: "UNWRAP_ASSET", + status: "FAILED", + details: `Rollback of Stage 0 failed: ${error}`, + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "FAILED"; + rollbackState.details = `Rollback of Stage 0 failed: ${error}`; + + return rollbackState; + } + } + + async cleanup( + session: SATPSession, + state: RollbackState, + ): Promise { + const fnTag = "Stage0RollbackStrategy#cleanup"; + this.log.info(`${fnTag} Cleaning up after Stage 0 rollback`); + + if (!session) { + this.log.error(`${fnTag} Session not found`); + return state; + } + + try { + // TODO: Implement Stage 0 specific cleanup logic + + // TODO: Update other state properties as needed + + return state; + } catch (error) { + this.log.error(`${fnTag} Cleanup failed: ${error}`); + return state; + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage1-rollback-strategy.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage1-rollback-strategy.ts new file mode 100644 index 0000000000..c99e0ce407 
--- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage1-rollback-strategy.ts @@ -0,0 +1,106 @@ +import { Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../../satp-session"; +import { RollbackStrategy } from "./rollback-strategy-factory"; +import { + RollbackLogEntrySchema, + RollbackState, + RollbackStateSchema, +} from "../../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { create } from "@bufbuild/protobuf"; +import { SATPLogger } from "../../../logging"; + +export class Stage1RollbackStrategy implements RollbackStrategy { + private log: Logger; + private dbLogger: SATPLogger; + + constructor(log: Logger, dbLogger: SATPLogger) { + this.log = log; + this.dbLogger = dbLogger; + } + + async execute(session: SATPSession): Promise { + const fnTag = "Stage1RollbackStrategy#execute"; + this.log.info(`${fnTag} Executing rollback for Stage 1`); + + if (!session) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + const rollbackState = create(RollbackStateSchema, { + sessionId: session.getSessionId(), + currentStage: "Stage1", + stepsRemaining: 0, + rollbackLogEntries: [], + estimatedTimeToCompletion: "", + status: "IN_PROGRESS", + details: "", + }); + + try { + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage1", + timestamp: new Date().toISOString(), + action: "NO_ACTION_REQUIRED", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "COMPLETED"; + rollbackState.details = ""; + + this.log.info(`${fnTag} Successfully rolled back Stage 1`); + return rollbackState; + } catch (error) { + this.log.error(`${fnTag} Failed to rollback Stage 1: ${error}`); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: sessionData.id, + stage: "Stage1", + timestamp: new Date().toISOString(), + action: "NO_ACTION_REQUIRED", + status: "FAILED", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "FAILED"; + rollbackState.details = `Rollback of Stage 1 failed: ${error}`; + + return rollbackState; + } + } + + async cleanup( + session: SATPSession, + state: RollbackState, + ): Promise { + const fnTag = "Stage1RollbackStrategy#cleanup"; + this.log.info(`${fnTag} Cleaning up after Stage 1 rollback`); + + if (!session) { + this.log.error(`${fnTag} Session not found`); + return state; + } + + try { + // TODO: Implement Stage 1 specific cleanup logic + + // TODO: Update other state properties as needed + + return state; + } catch (error) { + this.log.error(`${fnTag} Cleanup failed: ${error}`); + return state; + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage2-rollback-strategy.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage2-rollback-strategy.ts new file mode 100644 index 0000000000..10b573c3d7 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage2-rollback-strategy.ts @@ -0,0 +1,161 @@ +import { Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../../satp-session"; 
+import { RollbackStrategy } from "./rollback-strategy-factory"; +import { + RollbackLogEntrySchema, + RollbackState, + RollbackStateSchema, +} from "../../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPBridgesManager } from "../../../gol/satp-bridges-manager"; +import { create } from "@bufbuild/protobuf"; +import { SATPLogger } from "../../../logging"; + +export class Stage2RollbackStrategy implements RollbackStrategy { + private log: Logger; + private bridgeManager: SATPBridgesManager; + private dbLogger: SATPLogger; + + constructor( + bridgesManager: SATPBridgesManager, + log: Logger, + dbLogger: SATPLogger, + ) { + this.log = log; + this.bridgeManager = bridgesManager; + this.dbLogger = dbLogger; + } + + async execute(session: SATPSession): Promise { + const fnTag = "Stage2RollbackStrategy#execute"; + this.log.info(`${fnTag} Executing rollback for Stage 2`); + + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + const isClient = session.hasClientSessionData(); + const network = isClient + ? sessionData.senderGatewayNetworkId + : sessionData.recipientGatewayNetworkId; + + if (isClient && !network) { + throw new Error( + `${fnTag}: Unable to determine client network from session data.`, + ); + } + this.log.info(`${fnTag} network: ${network}`); + + const bridge = this.bridgeManager.getBridge(network); + if (!bridge) { + throw new Error(`${fnTag}: No bridge found for network: ${network}`); + } + + const rollbackState = create(RollbackStateSchema, { + sessionId: session.getSessionId(), + currentStage: "Stage2", + stepsRemaining: Number(), + rollbackLogEntries: [], + estimatedTimeToCompletion: "", + status: "IN_PROGRESS", + details: "", + }); + + try { + if (isClient) { + // Client-side: Unlock the asset + const assetId = sessionData.senderAsset?.tokenId; + const amount = sessionData.senderAsset?.amount; + + if (!assetId) { + throw new Error(`${fnTag}: Asset ID is undefined`); + } + + if (!amount) { + throw new Error(`${fnTag}: Amount is missing`); + } + + this.log.info( + `${fnTag} Unlocking Asset ID: ${assetId}, Amount: ${amount}`, + ); + + await bridge.unlockAsset(assetId, Number(amount)); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage2", + timestamp: new Date().toISOString(), + action: "UNLOCK_ASSET", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "COMPLETED"; + } else { + // Server-side: + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage2", + timestamp: new Date().toISOString(), + action: "NO_ACTION_REQUIRED", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "COMPLETED"; + } + + this.log.info( + `${fnTag} Successfully rolled back Stage 2 for session ${session.getSessionId()}`, + ); + return rollbackState; + } catch (error) { + this.log.error(`${fnTag} Failed to rollback Stage 2: ${error}`); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage2", + timestamp: new Date().toISOString(), + action: isClient ? 
"UNLOCK_ASSET" : "NO_ACTION_REQUIRED", + status: "FAILED", + details: `Rollback of Stage 2 failed: ${error}`, + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "FAILED"; + rollbackState.details = `Rollback of Stage 2 failed: ${error}`; + + return rollbackState; + } + } + + async cleanup( + session: SATPSession, + state: RollbackState, + ): Promise { + const fnTag = "Stage2RollbackStrategy#cleanup"; + this.log.info(`${fnTag} Cleaning up after Stage 2 rollback`); + + if (!session) { + this.log.error(`${fnTag} Session not found`); + return state; + } + + try { + // TODO: Implement Stage 2 specific cleanup logic + + //state.currentStage = "Stage2"; + // TODO: Update other state properties as needed + + return state; + } catch (error) { + this.log.error(`${fnTag} Cleanup failed: ${error}`); + return state; + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage3-rollback-strategy.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage3-rollback-strategy.ts new file mode 100644 index 0000000000..36d61b3c2a --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/rollback/stage3-rollback-strategy.ts @@ -0,0 +1,177 @@ +import { Logger } from "@hyperledger/cactus-common"; +import { SATPSession } from "../../satp-session"; +import { RollbackStrategy } from "./rollback-strategy-factory"; +import { + RollbackLogEntrySchema, + RollbackState, + RollbackStateSchema, +} from "../../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPBridgesManager } from "../../../gol/satp-bridges-manager"; +import { create } from "@bufbuild/protobuf"; +import { SATPLogger } from "../../../logging"; + +// todo : consider remint on source and burn the destination +export class Stage3RollbackStrategy implements RollbackStrategy { + private log: Logger; + private bridgeManager: SATPBridgesManager; + private dbLogger: SATPLogger; + + constructor( + bridgesManager: SATPBridgesManager, + log: Logger, + dbLogger: SATPLogger, + ) { + this.log = log; + this.bridgeManager = bridgesManager; + this.dbLogger = dbLogger; + } + + async execute(session: SATPSession): Promise { + const fnTag = "Stage3RollbackStrategy#execute"; + this.log.info(`${fnTag} Executing rollback for Stage 3`); + + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + const isClient = session.hasClientSessionData(); + const network = isClient + ? 
sessionData.senderGatewayNetworkId + : sessionData.recipientGatewayNetworkId; + + if (isClient && !network) { + throw new Error( + `${fnTag}: Unable to determine client network from session data.`, + ); + } + this.log.info(`${fnTag} network: ${network}`); + + const bridge = this.bridgeManager.getBridge(network); + if (!bridge) { + throw new Error(`${fnTag}: No bridge found for network: ${network}`); + } + + const rollbackState = create(RollbackStateSchema, { + sessionId: session.getSessionId(), + currentStage: "Stage3", + stepsRemaining: Number(), + rollbackLogEntries: [], + estimatedTimeToCompletion: "", + status: "IN_PROGRESS", + details: "", + }); + + try { + if (isClient) { + // Client-side: + + const assetId = sessionData.senderAsset?.tokenId; + const amount = sessionData.senderAsset?.amount; + + if (!assetId) { + throw new Error(`${fnTag}: Sender Asset ID is undefined`); + } + + if (amount === undefined || amount === null) { + throw new Error(`${fnTag}: Amount is missing`); + } + + this.log.info(`${fnTag} minting asset at source, assetId: ${assetId}`); + + await bridge.mintAsset(assetId, Number(amount)); + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage3", + timestamp: new Date().toISOString(), + action: "MINT_ASSET_SOURCE", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + } + + if (!isClient) { + // Server-side: + const assetId = sessionData.receiverAsset?.tokenId; + const amount = sessionData.receiverAsset?.amount; + + if (!assetId) { + throw new Error(`${fnTag}: Sender Asset ID is undefined`); + } + + if (amount === undefined || amount === null) { + throw new Error(`${fnTag}: Amount is missing`); + } + + this.log.info(`${fnTag} Burning Asset ID at Destination: ${assetId}`); + + await bridge.burnAsset(assetId, Number(amount)); + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage3", + timestamp: new Date().toISOString(), + action: "BURN_ASSET_DESTINATION", + status: "SUCCESS", + details: "", + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + } + + rollbackState.status = "COMPLETED"; + rollbackState.details = ""; + + this.log.info( + `${fnTag} Successfully rolled back Stage 3 for session ${session.getSessionId()}`, + ); + return rollbackState; + } catch (error) { + this.log.error(`${fnTag} Failed to rollback Stage 3: ${error}`); + + const rollbackLogEntry = create(RollbackLogEntrySchema, { + sessionId: session.getSessionId(), + stage: "Stage3", + timestamp: new Date().toISOString(), + action: isClient ? 
"MINT_ASSET_SOURCE" : "BURN_ASSET_DESTINATION", + status: "FAILED", + details: `Rollback of Stage 3 failed: ${error}`, + }); + + rollbackState.rollbackLogEntries.push(rollbackLogEntry); + rollbackState.status = "FAILED"; + rollbackState.details = `Rollback of Stage 3 failed: ${error}`; + + return rollbackState; + } + } + + async cleanup( + session: SATPSession, + state: RollbackState, + ): Promise { + const fnTag = "Stage3RollbackStrategy#cleanup"; + this.log.info(`${fnTag} Cleaning up after Stage 3 rollback`); + + if (!session) { + this.log.error(`${fnTag} Session not found`); + return state; + } + + try { + // TODO: Implement Stage 3 specific cleanup logic + + //state.currentStage = ""; + // TODO: Update other state properties as needed + + return state; + } catch (error) { + this.log.error(`${fnTag} Cleanup failed: ${error}`); + return state; + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/server-service.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/server-service.ts new file mode 100644 index 0000000000..978a6615f0 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/crash-management/server-service.ts @@ -0,0 +1,221 @@ +import { + RecoverMessage, + RecoverUpdateMessage, + RecoverSuccessMessage, + RollbackMessage, + RollbackAckMessage, + RecoverUpdateMessageSchema, + RollbackAckMessageSchema, + RecoverSuccessMessageResponse, + RecoverSuccessMessageResponseSchema, +} from "../../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPSession } from "../satp-session"; +import { ILocalLogRepository } from "../../repository/interfaces/repository"; +import { JsObjectSigner, Logger } from "@hyperledger/cactus-common"; +import { RollbackStrategyFactory } from "./rollback/rollback-strategy-factory"; +import { SATPBridgesManager } from "../../gol/satp-bridges-manager"; +import { create } from "@bufbuild/protobuf"; +import { SATPLogger } from "../../logging"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { bufArray2HexStr, sign } from "../../gateway-utils"; + +export class CrashRecoveryServerService { + constructor( + private readonly bridgesManager: SATPBridgesManager, + private readonly logRepository: ILocalLogRepository, + private readonly sessions: Map, + private readonly dbLogger: SATPLogger, + private readonly signer: JsObjectSigner, + private readonly log: Logger, + ) { + this.log = log; + this.log.trace(`Initialized ${CrashRecoveryServerService.name}`); + } + + public async handleRecover( + req: RecoverMessage, + ): Promise { + const fnTag = `${CrashRecoveryServerService.name}#handleRecover`; + + try { + this.log.debug(`${fnTag} - Handling RecoverMessage:`, req); + + const session = this.sessions.get(req.sessionId); + const sessionData = session?.getServerSessionData(); + if (!session) { + this.log.error(`${fnTag} - Session not found: ${req.sessionId}`); + throw new Error(`Session not found: ${req.sessionId}`); + } + + if (!sessionData) { + this.log.error(`${fnTag} - SessionData not found: ${req.sessionId}`); + throw new Error(`Error: ${req.sessionId}`); + } + + const recoveredLogs = await this.logRepository.fetchLogsFromSequence( + req.sessionId, + req.sequenceNumber, + ); + + if (recoveredLogs.length === 0) { + throw new Error( + `No logs Found: ${req.sessionId}, Sequence Number received: ${req.sequenceNumber}`, + ); + } + + const recoverUpdateMessage = create(RecoverUpdateMessageSchema, { + sessionId: req.sessionId, + 
messageType: "urn:ietf:SATP-2pc:msgtype:recover-update-msg", + hashRecoverMessage: "", + recoveredLogs: recoveredLogs, + senderSignature: "", + }); + + const signature = bufArray2HexStr( + sign(this.signer, safeStableStringify(recoverUpdateMessage)), + ); + + recoverUpdateMessage.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: sessionData.id, + type: "urn:ietf:SATP-2pc:msgtype:recover-update-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + + this.log.debug( + `${fnTag} - RecoverUpdateMessage created:`, + recoverUpdateMessage, + ); + + return recoverUpdateMessage; + } catch (error) { + this.log.error(`${fnTag} - Error handling RecoverMessage: ${error}`); + throw error; + } + } + + public async handleRecoverSuccess( + req: RecoverSuccessMessage, + ): Promise { + const fnTag = `${CrashRecoveryServerService.name}#handleRecoverSuccess`; + + try { + this.log.debug( + `${fnTag} - Handling RecoverSuccessMessage:`, + req.sessionId, + ); + + const session = this.sessions.get(req.sessionId); + const sessionData = session?.getServerSessionData(); + if (!session) { + this.log.error(`${fnTag} - Session not found: ${req.sessionId}`); + throw new Error(`Session not found: ${req.sessionId}`); + } + + if (!sessionData) { + this.log.error(`${fnTag} - SessionData not found: ${req.sessionId}`); + throw new Error(`Error: ${req.sessionId}`); + } + + const recoverSuccessMessageResponse = create( + RecoverSuccessMessageResponseSchema, + { + sessionId: req.sessionId, + received: true, + senderSignature: "", + }, + ); + + const signature = bufArray2HexStr( + sign(this.signer, safeStableStringify(recoverSuccessMessageResponse)), + ); + + recoverSuccessMessageResponse.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: recoverSuccessMessageResponse.sessionId, + type: "urn:ietf:SATP-2pc:msgtype:recover-success-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + + this.log.info(`${fnTag} - Session marked as recovered: ${req.sessionId}`); + return recoverSuccessMessageResponse; + } catch (error) { + this.log.error( + `${fnTag} - Error handling RecoverSuccessMessage: ${error}`, + ); + throw error; + } + } + + public async handleRollback( + req: RollbackMessage, + ): Promise { + const fnTag = `${CrashRecoveryServerService.name}#handleRollback`; + + try { + this.log.debug(`${fnTag} - Handling RollbackMessage:`, req); + + const session = this.sessions.get(req.sessionId); + const sessionData = session?.getServerSessionData(); + if (!session) { + this.log.error(`${fnTag} - Session not found: ${req.sessionId}`); + throw new Error(`Session not found: ${req.sessionId}`); + } + + if (!sessionData) { + this.log.error(`${fnTag} - SessionData not found: ${req.sessionId}`); + throw new Error(`Error: ${req.sessionId}`); + } + + const factory = new RollbackStrategyFactory( + this.bridgesManager, + this.log, + this.dbLogger, + ); + + const strategy = factory.createStrategy(session); + + const rollbackState = await strategy.execute(session); + + const rollbackAckMessage = create(RollbackAckMessageSchema, { + sessionId: req.sessionId, + messageType: "urn:ietf:SATP-2pc:msgtype:rollback-ack-msg", + success: rollbackState.status === "COMPLETED", + actionsPerformed: rollbackState.rollbackLogEntries.map( + (entry) => entry.action, + ), + proofs: [], + senderSignature: "", + }); + + const signature = 
bufArray2HexStr( + sign(this.signer, safeStableStringify(rollbackAckMessage)), + ); + + rollbackAckMessage.senderSignature = signature; + + await this.dbLogger.persistLogEntry({ + sessionID: sessionData.id, + type: "urn:ietf:SATP-2pc:msgtype:rollback-ack-msg", + operation: "done", + data: safeStableStringify(sessionData), + sequenceNumber: Number(sessionData.lastSequenceNumber), + }); + + this.log.info( + `${fnTag} - Rollback performed for session: ${req.sessionId}`, + ); + + return rollbackAckMessage; + } catch (error) { + this.log.error(`${fnTag} - Error handling RollbackMessage: ${error}`); + throw error; + } + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/satp-session.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/satp-session.ts index dab36d5960..a97f3be9aa 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/satp-session.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/satp-session.ts @@ -143,6 +143,27 @@ export class SATPSession { return this.clientSessionData; } + public static getSession(sessionData: SessionData): SATPSession { + const isClient = !!sessionData.senderAsset; + const isServer = !!sessionData.receiverAsset; + + const session = new SATPSession({ + contextID: sessionData.transferContextId, + sessionID: sessionData.id, + server: isServer, + client: isClient, + }); + + if (isServer) { + session.serverSessionData = sessionData; + } + if (isClient) { + session.clientSessionData = sessionData; + } + + return session; + } + public createSessionData( type: SessionType, sessionId: string, diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/types.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/types.ts index b7613010c8..3eb11e0490 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/core/types.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/core/types.ts @@ -84,6 +84,8 @@ export interface SATPGatewayConfig { bridgesConfig?: NetworkConfig[]; knexLocalConfig?: Knex.Config; knexRemoteConfig?: Knex.Config; + enableMigration?: boolean; + enableCrashManager?: boolean; } // export interface SATPBridgeConfig { @@ -107,7 +109,7 @@ export function isOfType( } export interface LocalLog { - sessionID: string; + sessionId: string; type: string; key: string; operation: string; @@ -127,3 +129,10 @@ export interface SATPBridgeConfig { logLevel?: LogLevelDesc; } export { SATPServiceInstance }; + +export enum CrashStatus { + IDLE = "IDLE", + IN_RECOVERY = "IN_RECOVERY", + IN_ROLLBACK = "IN_ROLLBACK", + ERROR = "ERROR", +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_connect.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_connect.ts index 5c58c6797e..d5c0f27c49 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_connect.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_connect.ts @@ -3,9 +3,10 @@ /* eslint-disable */ // @ts-nocheck +import { RecoverMessage, RecoverSuccessMessage, RecoverSuccessMessageResponse, RecoverUpdateMessage, RollbackAckMessage, RollbackMessage } from "./crash_recovery_pb.js"; +import { MethodKind } from "@bufbuild/protobuf"; + /** - * TODO: Rollback and crash-recovery related - * * util RPCs * * @generated from service cacti.satp.v02.crash.CrashRecovery @@ -13,6 +14,35 @@ 
export const CrashRecovery = { typeName: "cacti.satp.v02.crash.CrashRecovery", methods: { + /** + * step RPCs + * + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RecoverV2Message + */ + recoverV2Message: { + name: "RecoverV2Message", + I: RecoverMessage, + O: RecoverUpdateMessage, + kind: MethodKind.Unary, + }, + /** + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RecoverV2SuccessMessage + */ + recoverV2SuccessMessage: { + name: "RecoverV2SuccessMessage", + I: RecoverSuccessMessage, + O: RecoverSuccessMessageResponse, + kind: MethodKind.Unary, + }, + /** + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RollbackV2Message + */ + rollbackV2Message: { + name: "RollbackV2Message", + I: RollbackMessage, + O: RollbackAckMessage, + kind: MethodKind.Unary, + }, } } as const; diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_pb.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_pb.ts index 21d6c15775..42892a53bf 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_pb.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/generated/proto/cacti/satp/v02/crash_recovery_pb.ts @@ -2,24 +2,437 @@ // @generated from file cacti/satp/v02/crash_recovery.proto (package cacti.satp.v02.crash, syntax proto3) /* eslint-disable */ -import type { GenFile, GenService } from "@bufbuild/protobuf/codegenv1"; -import { fileDesc, serviceDesc } from "@bufbuild/protobuf/codegenv1"; +import type { GenFile, GenMessage, GenService } from "@bufbuild/protobuf/codegenv1"; +import { fileDesc, messageDesc, serviceDesc } from "@bufbuild/protobuf/codegenv1"; import { file_google_protobuf_empty } from "@bufbuild/protobuf/wkt"; +import type { Message } from "@bufbuild/protobuf"; /** * Describes the file cacti/satp/v02/crash_recovery.proto. 
*/ export const file_cacti_satp_v02_crash_recovery: GenFile = /*@__PURE__*/ - fileDesc("CiNjYWN0aS9zYXRwL3YwMi9jcmFzaF9yZWNvdmVyeS5wcm90bxIUY2FjdGkuc2F0cC52MDIuY3Jhc2gyDwoNQ3Jhc2hSZWNvdmVyeWIGcHJvdG8z", [file_google_protobuf_empty]); + fileDesc("CiNjYWN0aS9zYXRwL3YwMi9jcmFzaF9yZWNvdmVyeS5wcm90bxIUY2FjdGkuc2F0cC52MDIuY3Jhc2gi0wEKDlJlY292ZXJNZXNzYWdlEhIKCnNlc3Npb25faWQYASABKAkSFAoMbWVzc2FnZV90eXBlGAIgASgJEhIKCnNhdHBfcGhhc2UYAyABKAkSFwoPc2VxdWVuY2VfbnVtYmVyGAQgASgFEhEKCWlzX2JhY2t1cBgFIAEoCBIfChduZXdfaWRlbnRpdHlfcHVibGljX2tleRgGIAEoCRIcChRsYXN0X2VudHJ5X3RpbWVzdGFtcBgHIAEoAxIYChBzZW5kZXJfc2lnbmF0dXJlGAggASgJIrABChRSZWNvdmVyVXBkYXRlTWVzc2FnZRISCgpzZXNzaW9uX2lkGAEgASgJEhQKDG1lc3NhZ2VfdHlwZRgCIAEoCRIcChRoYXNoX3JlY292ZXJfbWVzc2FnZRgDIAEoCRI2Cg5yZWNvdmVyZWRfbG9ncxgEIAMoCzIeLmNhY3RpLnNhdHAudjAyLmNyYXNoLkxvY2FsTG9nEhgKEHNlbmRlcl9zaWduYXR1cmUYBSABKAkiqgEKFVJlY292ZXJTdWNjZXNzTWVzc2FnZRISCgpzZXNzaW9uX2lkGAEgASgJEhQKDG1lc3NhZ2VfdHlwZRgCIAEoCRIjChtoYXNoX3JlY292ZXJfdXBkYXRlX21lc3NhZ2UYAyABKAkSDwoHc3VjY2VzcxgEIAEoCBIXCg9lbnRyaWVzX2NoYW5nZWQYBSADKAkSGAoQc2VuZGVyX3NpZ25hdHVyZRgGIAEoCSJfCh1SZWNvdmVyU3VjY2Vzc01lc3NhZ2VSZXNwb25zZRISCgpzZXNzaW9uX2lkGAEgASgJEhAKCHJlY2VpdmVkGAIgASgIEhgKEHNlbmRlcl9zaWduYXR1cmUYAyABKAkikQEKD1JvbGxiYWNrTWVzc2FnZRISCgpzZXNzaW9uX2lkGAEgASgJEhQKDG1lc3NhZ2VfdHlwZRgCIAEoCRIPCgdzdWNjZXNzGAMgASgIEhkKEWFjdGlvbnNfcGVyZm9ybWVkGAQgAygJEg4KBnByb29mcxgFIAMoCRIYChBzZW5kZXJfc2lnbmF0dXJlGAYgASgJIpQBChJSb2xsYmFja0Fja01lc3NhZ2USEgoKc2Vzc2lvbl9pZBgBIAEoCRIUCgxtZXNzYWdlX3R5cGUYAiABKAkSDwoHc3VjY2VzcxgDIAEoCBIZChFhY3Rpb25zX3BlcmZvcm1lZBgEIAMoCRIOCgZwcm9vZnMYBSADKAkSGAoQc2VuZGVyX3NpZ25hdHVyZRgGIAEoCSKGAQoITG9jYWxMb2cSEgoKc2Vzc2lvbl9pZBgBIAEoCRIMCgR0eXBlGAIgASgJEgsKA2tleRgDIAEoCRIRCglvcGVyYXRpb24YBCABKAkSEQoJdGltZXN0YW1wGAUgASgJEgwKBGRhdGEYBiABKAkSFwoPc2VxdWVuY2VfbnVtYmVyGAcgASgFInkKEFJvbGxiYWNrTG9nRW50cnkSEgoKc2Vzc2lvbl9pZBgBIAEoCRINCgVzdGFnZRgCIAEoCRIRCgl0aW1lc3RhbXAYAyABKAkSDgoGYWN0aW9uGAQgASgJEg4KBnN0YXR1cxgFIAEoCRIPCgdkZXRhaWxzGAYgASgJIuABCg1Sb2xsYmFja1N0YXRlEhIKCnNlc3Npb25faWQYASABKAkSFQoNY3VycmVudF9zdGFnZRgCIAEoCRIXCg9zdGVwc19yZW1haW5pbmcYAyABKAUSRAoUcm9sbGJhY2tfbG9nX2VudHJpZXMYBCADKAsyJi5jYWN0aS5zYXRwLnYwMi5jcmFzaC5Sb2xsYmFja0xvZ0VudHJ5EiQKHGVzdGltYXRlZF90aW1lX3RvX2NvbXBsZXRpb24YBSABKAkSDgoGc3RhdHVzGAYgASgJEg8KB2RldGFpbHMYByABKAky2AIKDUNyYXNoUmVjb3ZlcnkSZAoQUmVjb3ZlclYyTWVzc2FnZRIkLmNhY3RpLnNhdHAudjAyLmNyYXNoLlJlY292ZXJNZXNzYWdlGiouY2FjdGkuc2F0cC52MDIuY3Jhc2guUmVjb3ZlclVwZGF0ZU1lc3NhZ2USewoXUmVjb3ZlclYyU3VjY2Vzc01lc3NhZ2USKy5jYWN0aS5zYXRwLnYwMi5jcmFzaC5SZWNvdmVyU3VjY2Vzc01lc3NhZ2UaMy5jYWN0aS5zYXRwLnYwMi5jcmFzaC5SZWNvdmVyU3VjY2Vzc01lc3NhZ2VSZXNwb25zZRJkChFSb2xsYmFja1YyTWVzc2FnZRIlLmNhY3RpLnNhdHAudjAyLmNyYXNoLlJvbGxiYWNrTWVzc2FnZRooLmNhY3RpLnNhdHAudjAyLmNyYXNoLlJvbGxiYWNrQWNrTWVzc2FnZWIGcHJvdG8z", [file_google_protobuf_empty]); + +/** + * @generated from message cacti.satp.v02.crash.RecoverMessage + */ +export type RecoverMessage = Message<"cacti.satp.v02.crash.RecoverMessage"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string message_type = 2; + */ + messageType: string; + + /** + * @generated from field: string satp_phase = 3; + */ + satpPhase: string; + + /** + * @generated from field: int32 sequence_number = 4; + */ + sequenceNumber: number; + + /** + * @generated from field: bool is_backup = 5; + */ + isBackup: boolean; + + /** + * @generated from field: string new_identity_public_key = 6; + */ + newIdentityPublicKey: string; + + /** + * @generated from field: int64 last_entry_timestamp = 7; + */ + lastEntryTimestamp: 
bigint; + + /** + * @generated from field: string sender_signature = 8; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RecoverMessage. + * Use `create(RecoverMessageSchema)` to create a new message. + */ +export const RecoverMessageSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 0); + +/** + * @generated from message cacti.satp.v02.crash.RecoverUpdateMessage + */ +export type RecoverUpdateMessage = Message<"cacti.satp.v02.crash.RecoverUpdateMessage"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string message_type = 2; + */ + messageType: string; + + /** + * @generated from field: string hash_recover_message = 3; + */ + hashRecoverMessage: string; + + /** + * @generated from field: repeated cacti.satp.v02.crash.LocalLog recovered_logs = 4; + */ + recoveredLogs: LocalLog[]; + + /** + * @generated from field: string sender_signature = 5; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RecoverUpdateMessage. + * Use `create(RecoverUpdateMessageSchema)` to create a new message. + */ +export const RecoverUpdateMessageSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 1); + +/** + * @generated from message cacti.satp.v02.crash.RecoverSuccessMessage + */ +export type RecoverSuccessMessage = Message<"cacti.satp.v02.crash.RecoverSuccessMessage"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string message_type = 2; + */ + messageType: string; + + /** + * @generated from field: string hash_recover_update_message = 3; + */ + hashRecoverUpdateMessage: string; + + /** + * @generated from field: bool success = 4; + */ + success: boolean; + + /** + * @generated from field: repeated string entries_changed = 5; + */ + entriesChanged: string[]; + + /** + * @generated from field: string sender_signature = 6; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RecoverSuccessMessage. + * Use `create(RecoverSuccessMessageSchema)` to create a new message. + */ +export const RecoverSuccessMessageSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 2); + +/** + * @generated from message cacti.satp.v02.crash.RecoverSuccessMessageResponse + */ +export type RecoverSuccessMessageResponse = Message<"cacti.satp.v02.crash.RecoverSuccessMessageResponse"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: bool received = 2; + */ + received: boolean; + + /** + * @generated from field: string sender_signature = 3; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RecoverSuccessMessageResponse. + * Use `create(RecoverSuccessMessageResponseSchema)` to create a new message. 
+ */ +export const RecoverSuccessMessageResponseSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 3); + +/** + * @generated from message cacti.satp.v02.crash.RollbackMessage + */ +export type RollbackMessage = Message<"cacti.satp.v02.crash.RollbackMessage"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string message_type = 2; + */ + messageType: string; + + /** + * @generated from field: bool success = 3; + */ + success: boolean; + + /** + * @generated from field: repeated string actions_performed = 4; + */ + actionsPerformed: string[]; + + /** + * @generated from field: repeated string proofs = 5; + */ + proofs: string[]; + + /** + * @generated from field: string sender_signature = 6; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RollbackMessage. + * Use `create(RollbackMessageSchema)` to create a new message. + */ +export const RollbackMessageSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 4); + +/** + * @generated from message cacti.satp.v02.crash.RollbackAckMessage + */ +export type RollbackAckMessage = Message<"cacti.satp.v02.crash.RollbackAckMessage"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string message_type = 2; + */ + messageType: string; + + /** + * @generated from field: bool success = 3; + */ + success: boolean; + + /** + * @generated from field: repeated string actions_performed = 4; + */ + actionsPerformed: string[]; + + /** + * @generated from field: repeated string proofs = 5; + */ + proofs: string[]; + + /** + * @generated from field: string sender_signature = 6; + */ + senderSignature: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RollbackAckMessage. + * Use `create(RollbackAckMessageSchema)` to create a new message. + */ +export const RollbackAckMessageSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 5); + +/** + * @generated from message cacti.satp.v02.crash.LocalLog + */ +export type LocalLog = Message<"cacti.satp.v02.crash.LocalLog"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string type = 2; + */ + type: string; + + /** + * @generated from field: string key = 3; + */ + key: string; + + /** + * @generated from field: string operation = 4; + */ + operation: string; + + /** + * @generated from field: string timestamp = 5; + */ + timestamp: string; + + /** + * @generated from field: string data = 6; + */ + data: string; + + /** + * @generated from field: int32 sequence_number = 7; + */ + sequenceNumber: number; +}; + +/** + * Describes the message cacti.satp.v02.crash.LocalLog. + * Use `create(LocalLogSchema)` to create a new message. 
+ */ +export const LocalLogSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 6); + +/** + * @generated from message cacti.satp.v02.crash.RollbackLogEntry + */ +export type RollbackLogEntry = Message<"cacti.satp.v02.crash.RollbackLogEntry"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string stage = 2; + */ + stage: string; + + /** + * @generated from field: string timestamp = 3; + */ + timestamp: string; + + /** + * action performed during rollback + * + * @generated from field: string action = 4; + */ + action: string; + + /** + * status of rollback (e.g., SUCCESS, FAILED) + * + * @generated from field: string status = 5; + */ + status: string; + + /** + * Additional details or metadata about the rollback + * + * @generated from field: string details = 6; + */ + details: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RollbackLogEntry. + * Use `create(RollbackLogEntrySchema)` to create a new message. + */ +export const RollbackLogEntrySchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 7); + +/** + * @generated from message cacti.satp.v02.crash.RollbackState + */ +export type RollbackState = Message<"cacti.satp.v02.crash.RollbackState"> & { + /** + * @generated from field: string session_id = 1; + */ + sessionId: string; + + /** + * @generated from field: string current_stage = 2; + */ + currentStage: string; + + /** + * @generated from field: int32 steps_remaining = 3; + */ + stepsRemaining: number; + + /** + * @generated from field: repeated cacti.satp.v02.crash.RollbackLogEntry rollback_log_entries = 4; + */ + rollbackLogEntries: RollbackLogEntry[]; + + /** + * @generated from field: string estimated_time_to_completion = 5; + */ + estimatedTimeToCompletion: string; + + /** + * Overall status (e.g., IN_PROGRESS, COMPLETED, FAILED) + * + * @generated from field: string status = 6; + */ + status: string; + + /** + * Additional metadata or information + * + * @generated from field: string details = 7; + */ + details: string; +}; + +/** + * Describes the message cacti.satp.v02.crash.RollbackState. + * Use `create(RollbackStateSchema)` to create a new message. 
+ */ +export const RollbackStateSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_cacti_satp_v02_crash_recovery, 8); /** - * TODO: Rollback and crash-recovery related - * * util RPCs * * @generated from service cacti.satp.v02.crash.CrashRecovery */ export const CrashRecovery: GenService<{ + /** + * step RPCs + * + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RecoverV2Message + */ + recoverV2Message: { + methodKind: "unary"; + input: typeof RecoverMessageSchema; + output: typeof RecoverUpdateMessageSchema; + }, + /** + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RecoverV2SuccessMessage + */ + recoverV2SuccessMessage: { + methodKind: "unary"; + input: typeof RecoverSuccessMessageSchema; + output: typeof RecoverSuccessMessageResponseSchema; + }, + /** + * @generated from rpc cacti.satp.v02.crash.CrashRecovery.RollbackV2Message + */ + rollbackV2Message: { + methodKind: "unary"; + input: typeof RollbackMessageSchema; + output: typeof RollbackAckMessageSchema; + }, }> = /*@__PURE__*/ serviceDesc(file_cacti_satp_v02_crash_recovery, 0); diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/crash-manager.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/crash-manager.ts new file mode 100644 index 0000000000..ecbece1fb4 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/crash-manager.ts @@ -0,0 +1,726 @@ +import { + Logger, + LoggerProvider, + Checks, + LogLevelDesc, + JsObjectSigner, +} from "@hyperledger/cactus-common"; +import { + SessionData, + State, +} from "../generated/proto/cacti/satp/v02/common/session_pb"; +import { CrashRecoveryHandler } from "../core/crash-management/crash-handler"; +import { SATPSession } from "../core/satp-session"; +import { + RollbackStrategy, + RollbackStrategyFactory, +} from "../core/crash-management/rollback/rollback-strategy-factory"; +import { + ILocalLogRepository, + IRemoteLogRepository, +} from "../repository/interfaces/repository"; +import { + RecoverUpdateMessage, + RollbackState, + RollbackAckMessage, +} from "../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPBridgesManager } from "./satp-bridges-manager"; +import schedule, { Job } from "node-schedule"; +import { CrashRecoveryServerService } from "../core/crash-management/server-service"; +import { CrashRecoveryClientService } from "../core/crash-management/client-service"; +import { GatewayOrchestrator } from "./gateway-orchestrator"; +import { Client as PromiseConnectClient } from "@connectrpc/connect"; +import { GatewayIdentity, SupportedChain } from "../core/types"; +import { CrashRecovery } from "../generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { SATPHandler } from "../types/satp-protocol"; +import { ISATPLoggerConfig, SATPLogger } from "../logging"; +import { CrashStatus } from "../core/types"; + +export interface ICrashRecoveryManagerOptions { + logLevel?: LogLevelDesc; + localRepository: ILocalLogRepository; + remoteRepository: IRemoteLogRepository; + instanceId: string; + bridgeConfig: SATPBridgesManager; + orchestrator: GatewayOrchestrator; + signer: JsObjectSigner; + pubKey: string; + healthCheckInterval?: string; +} + +export class CrashManager { + public static readonly CLASS_NAME = "CrashManager"; + private readonly log: Logger; + private readonly instanceId: string; + public sessions: Map; + public sessionProcessStatus: Map = new Map(); + private crashRecoveryHandler: CrashRecoveryHandler; + private factory: RollbackStrategyFactory; + public localRepository: 
ILocalLogRepository; + public remoteRepository: IRemoteLogRepository; + private crashScheduler?: Job; + private crashRecoveryServerService: CrashRecoveryServerService; + private crashRecoveryClientService: CrashRecoveryClientService; + private orchestrator: GatewayOrchestrator; + private gatewaysPubKeys: Map = new Map(); + private readonly bridgesManager: SATPBridgesManager; + public dbLogger: SATPLogger; + private signer: JsObjectSigner; + private _pubKey: string; + + constructor(public readonly options: ICrashRecoveryManagerOptions) { + const fnTag = `${CrashManager.CLASS_NAME}#constructor()`; + Checks.truthy(options, `${fnTag} arg options`); + + const level = this.options.logLevel; + const label = this.className; + this.log = LoggerProvider.getOrCreate({ level, label }); + this.log.info(`Instantiated ${this.className} OK`); + this.instanceId = options.instanceId; + this.sessions = new Map(); + this.localRepository = options.localRepository; + this.remoteRepository = options.remoteRepository; + this._pubKey = options.pubKey; + this.signer = options.signer; + this.orchestrator = options.orchestrator; + this.bridgesManager = options.bridgeConfig; + this.loadPubKeys(this.orchestrator.getCounterPartyGateways()); + + const satpLoggerConfig: ISATPLoggerConfig = { + localRepository: this.localRepository, + remoteRepository: this.remoteRepository, + signer: this.signer, + pubKey: this._pubKey, + }; + + this.dbLogger = new SATPLogger(satpLoggerConfig); + this.log.debug(`${fnTag} dbLogger initialized: ${!!this.dbLogger}`); + + this.factory = new RollbackStrategyFactory( + this.bridgesManager, + this.log, + this.dbLogger, + ); + + this.crashRecoveryServerService = new CrashRecoveryServerService( + this.bridgesManager, + this.localRepository, + this.sessions, + this.dbLogger, + this.signer, + this.log, + ); + + this.crashRecoveryClientService = new CrashRecoveryClientService( + this.dbLogger, + this.log, + this.signer, + ); + + this.crashRecoveryHandler = new CrashRecoveryHandler( + this.crashRecoveryServerService, + this.crashRecoveryClientService, + this.log, + ); + + const crashRecoveryHandlers = new Map(); + crashRecoveryHandlers.set("crash-handler", this.crashRecoveryHandler); + this.orchestrator.addHandlers(crashRecoveryHandlers); + } + + get className(): string { + return CrashManager.CLASS_NAME; + } + + public getInstanceId(): string { + return this.instanceId; + } + + public async recoverSessions() { + const fnTag = `${this.className}#recoverSessions()`; + + try { + const allLogs = await this.localRepository.readLogsNotProofs(); + for (const log of allLogs) { + const sessionId = log.sessionId; + this.log.info( + `${fnTag}, recovering session from database: ${sessionId}`, + ); + + if (!log || !log.data) { + throw new Error(`${fnTag}, invalid log`); + } + + try { + const sessionData: SessionData = JSON.parse(log.data); + const satpSession = SATPSession.getSession(sessionData); + this.sessions.set(sessionId, satpSession); + } catch (error) { + this.log.error( + `Error parsing log data for session Id: ${sessionId}: ${error}`, + ); + } + } + + if (this.sessions.size === 0) { + this.log.info(`${fnTag} No active sessions!`); + } else { + this.crashScheduler = schedule.scheduleJob( + this.options.healthCheckInterval || "*/5 * * * * *", + async () => { + await this.checkAndResolveCrashes(); + }, + ); + this.log.info(`${fnTag} Crash detection job is running`); + } + } catch (error) { + this.log.error(`Error initializing sessions: ${error}`); + } + } + + private updateSessionState(sessionId: string, 
newState: State): string { + const session = this.sessions.get(sessionId); + if (!session) { + throw new Error(`Session with ID ${sessionId} not found.`); + } + + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + sessionData.state = newState; + this.sessions.set(sessionId, session); + + return State[sessionData.state]; + } + + public stopScheduler(): void { + const fnTag = `${this.className}#stopScheduler()`; + + if (this.crashScheduler) { + this.crashScheduler.cancel(); + this.crashScheduler = undefined; + this.log.info(`${fnTag} crash detection job stopped successfully`); + } else { + this.log.warn(`${fnTag} No active crash detection job to stop`); + } + } + + public async checkAndResolveCrashes(): Promise { + const fnTag = `${this.className}#checkAndResolveCrashes()`; + + if (this.sessions.size === 0) { + this.log.info( + `${fnTag} No sessions to check. Waiting for new sessions...`, + ); + return; + } + + for (const [sessionId, session] of this.sessions.entries()) { + await this.checkAndResolveCrash(session); + const currentSession = this.sessions.get(sessionId); + const processStatus = + this.sessionProcessStatus.get(sessionId) || CrashStatus.IDLE; + + if ( + processStatus === CrashStatus.IN_RECOVERY || + processStatus === CrashStatus.IN_ROLLBACK + ) { + this.log.debug( + `${fnTag} Session ${sessionId} is currently ${processStatus}, skipping.`, + ); + continue; // Skip this session + } + + if (!currentSession) { + this.log.warn( + `${fnTag} Updated session with ID ${sessionId} not found after resolution.`, + ); + continue; + } + + const currentSessionData = currentSession.hasClientSessionData() + ? currentSession.getClientSessionData() + : currentSession.getServerSessionData(); + + if (!currentSessionData) { + this.log.warn( + `${fnTag} Session data for session ID ${sessionId} is undefined.`, + ); + continue; + } + + this.log.debug( + `${fnTag} Session ${sessionId} state: ${State[currentSessionData.state]}`, + ); + } + } + + public async checkAndResolveCrash(session: SATPSession): Promise { + const fnTag = `${this.className}#checkAndResolveCrash()`; + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + + try { + let attempts = 0; + const maxRetries = Number(sessionData.maxRetries); + + while (attempts < maxRetries) { + const crashStatus = await this.checkCrash(session); + // RECOVERY + if (crashStatus === CrashStatus.IN_RECOVERY) { + this.log.info( + `${fnTag} Crash detected! 
Attempting recovery for session ${session.getSessionId()}`, + ); + this.sessionProcessStatus.set( + session.getSessionId(), + CrashStatus.IN_RECOVERY, + ); + const recoverySuccess = await this.handleRecovery(session); + + if (recoverySuccess) { + const sessionId = session.getSessionId(); + this.sessionProcessStatus.set(sessionId, CrashStatus.IDLE); + const updatedState = this.updateSessionState( + sessionId, + State.RECOVERED, + ); + + this.log.info(`${fnTag} crash state : ${updatedState}`); + + this.log.info( + `${fnTag} Recovery successful for sessionID: ${session.getSessionId()}`, + ); + return; + } else { + attempts++; + this.log.info( + `${fnTag} Recovery attempt ${attempts} failed for sessionID: ${session.getSessionId()}`, + ); + } + // ROLLBACK + } else if (crashStatus === CrashStatus.IN_ROLLBACK) { + this.sessionProcessStatus.set( + session.getSessionId(), + CrashStatus.IN_ROLLBACK, + ); + this.log.warn( + `${fnTag} Initiating rollback for session ${session.getSessionId()}!`, + ); + + const rollbackSuccess = await this.initiateRollback(session, true); + if (rollbackSuccess) { + this.sessionProcessStatus.set( + session.getSessionId(), + CrashStatus.IDLE, + ); + this.log.info( + `${fnTag} Rollback completed for session ${session.getSessionId()}.`, + ); + } else { + this.log.error( + `${fnTag} Rollback failed for session ${session.getSessionId()}.`, + ); + this.sessionProcessStatus.set( + session.getSessionId(), + CrashStatus.ERROR, + ); + } + return; // Exit after rollback process + } else if (crashStatus === CrashStatus.IDLE) { + this.log.info( + `${fnTag} No crash detected for session ID: ${session.getSessionId()}`, + ); + return; // Exit if no crash + } else { + this.log.warn(`${fnTag} Unhandled crash status: ${crashStatus}`); + return; + } + } + + this.log.warn( + `${fnTag} All recovery attempts exhausted. Initiating rollback for session ${session.getSessionId()}.`, + ); + await this.initiateRollback(session, true); + } catch (error) { + this.log.error(`${fnTag} Error during crash resolution: ${error}`); + } + } + + private async checkCrash(session: SATPSession): Promise { + const fnTag = `${this.className}#checkCrash()`; + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + + try { + if (!this.localRepository) { + this.log.error( + `${fnTag} Local repository is not available. Unable to proceed.`, + ); + return CrashStatus.ERROR; + } + + let lastLog = null; + try { + lastLog = await this.localRepository.readLastestLog( + session.getSessionId(), + ); + } catch (error) { + this.log.error( + `${fnTag} Unable to acquire a connection: ${error.message}`, + ); + return CrashStatus.ERROR; + } + + if (!lastLog) { + this.log.warn( + `${fnTag} No logs found for session ID: ${session.getSessionId()}`, + ); + return CrashStatus.ERROR; + } + + const logTimestamp = new Date(lastLog?.timestamp ?? 
0).getTime(); + const currentTime = Date.now(); + const timeDifference = currentTime - logTimestamp; + + switch (true) { + case lastLog.operation !== "done": + this.log.info( + `${fnTag} Crash detected for session ID: ${session.getSessionId()}, last log operation: ${lastLog.operation}`, + ); + return CrashStatus.IN_RECOVERY; + + case timeDifference > Number(sessionData.maxTimeout): + this.log.warn( + `${fnTag} Timeout exceeded by ${timeDifference} ms for session ID: ${session.getSessionId()}`, + ); + return CrashStatus.IN_ROLLBACK; + + default: + this.log.info( + `${fnTag} No crash detected for session ID: ${session.getSessionId()}`, + ); + return CrashStatus.IDLE; + } + } catch (error) { + this.log.error(`${fnTag} Error occurred during crash check: ${error}`); + return CrashStatus.ERROR; + } + } + + public async handleRecovery(session: SATPSession): Promise { + const fnTag = `${this.className}#handleRecovery()`; + this.log.debug( + `${fnTag} - Starting crash recovery for sessionId: ${session.getSessionId()}`, + ); + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + try { + const channel = this.orchestrator.getChannel( + sessionData.recipientGatewayNetworkId as SupportedChain, + ); + + if (!channel) { + throw new Error( + `${fnTag} - Channel not found for the recipient gateway network ID.`, + ); + } + + const counterGatewayID = this.orchestrator.getGatewayIdentity( + channel.toGatewayID, + ); + if (!counterGatewayID) { + throw new Error(`${fnTag} - Counterparty gateway ID not found.`); + } + + const clientCrashRecovery: PromiseConnectClient = + channel.clients.get("crash") as PromiseConnectClient< + typeof CrashRecovery + >; + + if (!clientCrashRecovery) { + throw new Error(`${fnTag} - Failed to get clientCrashRecovery.`); + } + + const recoverMessage = + await this.crashRecoveryHandler.createRecoverMessage(session); + + const recoverUpdateMessage = + await clientCrashRecovery.recoverV2Message(recoverMessage); + + const sequenceNumbers = recoverUpdateMessage.recoveredLogs.map( + (log) => log.sequenceNumber, + ); + this.log.info( + `${fnTag} - Received logs sequence numbers: ${sequenceNumbers}`, + ); + + await this.processRecoverUpdateMessage(recoverUpdateMessage); + + const recoverSuccessMessage = + await this.crashRecoveryHandler.createRecoverSuccessMessage(session); + + await clientCrashRecovery.recoverV2SuccessMessage(recoverSuccessMessage); + + this.log.info( + `${fnTag} - Crash recovery completed for sessionId: ${session.getSessionId()}`, + ); + + return true; + } catch (error) { + this.log.error( + `${fnTag} Error during recovery process for session ID: ${session.getSessionId()} - ${error}`, + ); + throw new Error( + `Recovery failed for session ID: ${session.getSessionId()}`, + ); + } + } + + private async processRecoverUpdateMessage( + message: RecoverUpdateMessage, + ): Promise { + const fnTag = `${this.className}#processRecoverUpdate()`; + try { + const sessionId = message.sessionId; + const recoveredLogs = message.recoveredLogs; + + for (const logEntry of recoveredLogs) { + await this.localRepository.create({ + sessionId: logEntry.sessionId, + operation: logEntry.operation, + data: logEntry.data, + timestamp: logEntry.timestamp, + type: logEntry.type, + key: logEntry.key, + sequenceNumber: logEntry.sequenceNumber, + }); + } + + for (const log of recoveredLogs) { + const sessionId = log.sessionId; + this.log.info(`${fnTag}, recovering session: ${sessionId}`); + + if (!log || !log.data) { + throw new 
Error(`${fnTag}, invalid log`); + } + + try { + const sessionData: SessionData = JSON.parse(log.data); + const satpSession = SATPSession.getSession(sessionData); + this.sessions.set(sessionId, satpSession); + } catch (error) { + this.log.error( + `Error parsing log data for session Id: ${sessionId}: ${error}`, + ); + } + } + this.log.info( + `${fnTag} Session data successfully reconstructed for session ID: ${sessionId}`, + ); + + return true; + } catch (error) { + this.log.error( + `${fnTag} Error processing RecoverUpdateMessage: ${error}`, + ); + return false; + } + } + + public async initiateRollback( + session: SATPSession, + forceRollback?: boolean, + ): Promise { + const fnTag = `CrashManager#initiateRollback()`; + + const sessionData = session.hasClientSessionData() + ? session.getClientSessionData() + : session.getServerSessionData(); + if (!sessionData) { + throw new Error(`${fnTag}, session data is not correctly initialized`); + } + this.log.info( + `${fnTag} Initiating rollback for session ${session.getSessionId()}`, + ); + + try { + if (forceRollback) { + const strategy = this.factory.createStrategy(session); + const rollbackState = await this.executeRollback(strategy, session); + + if (rollbackState?.status === "COMPLETED") { + const cleanupSuccess = await this.performCleanup( + strategy, + session, + rollbackState, + ); + + const rollbackSuccess = await this.sendRollbackMessage( + session, + rollbackState, + ); + return cleanupSuccess && rollbackSuccess; + } else { + this.log.error( + `${fnTag} Rollback execution failed for session ${session.getSessionId()}`, + ); + return false; + } + } else { + this.log.info( + `${fnTag} Rollback not needed for session ${session.getSessionId()}`, + ); + return true; + } + } catch (error) { + this.log.error(`${fnTag} Error during rollback initiation: ${error}`); + return false; + } + } + + private async executeRollback( + strategy: RollbackStrategy, + session: SATPSession, + ): Promise { + const fnTag = `CrashManager#executeRollback`; + this.log.debug( + `${fnTag} Executing rollback strategy for sessionId: ${session.getSessionId()}`, + ); + + try { + return await strategy.execute(session); + } catch (error) { + this.log.error(`${fnTag} Error executing rollback strategy: ${error}`); + return undefined; + } + } + + private async sendRollbackMessage( + session: SATPSession, + rollbackState: RollbackState, + ): Promise { + const fnTag = `${this.className}#sendRollbackMessage()`; + this.log.debug( + `${fnTag} - Starting to send RollbackMessage for sessionId: ${session.getSessionId()}`, + ); + + try { + const channel = this.orchestrator.getChannel( + session.getClientSessionData() + .recipientGatewayNetworkId as SupportedChain, + ); + + if (!channel) { + throw new Error( + `${fnTag} - Channel not found for the recipient gateway network ID.`, + ); + } + + const counterGatewayID = this.orchestrator.getGatewayIdentity( + channel.toGatewayID, + ); + if (!counterGatewayID) { + throw new Error(`${fnTag} - Counterparty gateway ID not found.`); + } + + const clientCrashRecovery: PromiseConnectClient = + channel.clients.get("crash") as PromiseConnectClient< + typeof CrashRecovery + >; + + if (!clientCrashRecovery) { + throw new Error(`${fnTag} - Failed to get clientCrashRecovery.`); + } + + const rollbackMessage = + await this.crashRecoveryHandler.createRollbackMessage( + session, + rollbackState, + ); + + const rollbackAckMessage = + await clientCrashRecovery.rollbackV2Message(rollbackMessage); + + this.log.info( + `${fnTag} - Received RollbackAckMessage: 
${rollbackAckMessage}`, + ); + + const rollbackStatus = + await this.processRollbackAckMessage(rollbackAckMessage); + + return rollbackStatus; + } catch (error) { + this.log.error( + `${fnTag} Error during rollback message sending: ${error}`, + ); + return false; + } + } + + private async processRollbackAckMessage( + message: RollbackAckMessage, + ): Promise { + const fnTag = `${this.className}#processRollbackAckMessage()`; + try { + if (message.success) { + this.log.info( + `${fnTag} Rollback acknowledged by the counterparty for session ID: ${message.sessionId}`, + ); + return true; + } else { + this.log.warn( + `${fnTag} Rollback failed at counterparty for session ID: ${message.sessionId}`, + ); + return false; + } + } catch (error) { + this.log.error(`${fnTag} Error processing RollbackAckMessage: ${error}`); + return false; + } + } + + private async performCleanup( + strategy: RollbackStrategy, + session: SATPSession, + state: RollbackState, + ): Promise { + const fnTag = `CrashManager#performCleanup`; + this.log.debug( + `${fnTag} Performing cleanup after rollback for session ${session.getSessionId()}`, + ); + + try { + const updatedState = await strategy.cleanup(session, state); + + // TODO: Handle the updated state, perhaps update session data or perform additional actions + this.log.info( + `${fnTag} Cleanup completed. Updated state: ${JSON.stringify(updatedState)}`, + ); + + return true; + } catch (error) { + this.log.error(`${fnTag} Error during cleanup: ${error}`); + return false; + } + } + + private loadPubKeys(gateways: Map): void { + gateways.forEach((gateway) => { + if (gateway.pubKey) { + this.gatewaysPubKeys.set(gateway.id, gateway.pubKey); + } + }); + this.gatewaysPubKeys.set( + this.orchestrator.getSelfId(), + this.orchestrator.ourGateway.pubKey!, + ); + } +} diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/gateway-orchestrator.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/gateway-orchestrator.ts index e266764964..b2f133f5d5 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/gateway-orchestrator.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/gateway-orchestrator.ts @@ -26,6 +26,7 @@ import { SatpStage0Service } from "../generated/proto/cacti/satp/v02/stage_0_pb" import { SatpStage1Service } from "../generated/proto/cacti/satp/v02/stage_1_pb"; import { SatpStage2Service } from "../generated/proto/cacti/satp/v02/stage_2_pb"; import { SatpStage3Service } from "../generated/proto/cacti/satp/v02/stage_3_pb"; +import { CrashRecovery } from "../generated/proto/cacti/satp/v02/crash_recovery_pb"; export interface IGatewayOrchestratorOptions { logLevel?: LogLevelDesc; @@ -327,12 +328,19 @@ export class GatewayOrchestrator { httpVersion: "1.1", }); + const transport4 = createGrpcWebTransport({ + baseUrl: + identity.address + ":" + identity.gatewayServerPort + `/${"crash"}`, + httpVersion: "1.1", + }); + const clients: Map> = new Map(); clients.set("0", this.createStage0ServiceClient(transport0)); clients.set("1", this.createStage1ServiceClient(transport1)); clients.set("2", this.createStage2ServiceClient(transport2)); clients.set("3", this.createStage3ServiceClient(transport3)); + clients.set("crash", this.createCrashServiceClient(transport4)); // todo perform healthcheck on startup; should be in stage 0 return clients; @@ -382,6 +390,17 @@ export class GatewayOrchestrator { return client; } + private createCrashServiceClient( + transport: ConnectTransport, + ): ConnectClient { + this.logger.debug( + 
"Creating crash-manager client, with transport: ", + transport, + ); + const client = createClient(CrashRecovery, transport); + return client; + } + public async resolveAndAddGateways(IDs: string[]): Promise { const fnTag = `${this.label}#addGateways()`; this.logger.trace(`Entering ${fnTag}`); diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/satp-manager.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/satp-manager.ts index 4595c63172..1f4ce55684 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/satp-manager.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/gol/satp-manager.ts @@ -157,7 +157,7 @@ export class SATPManager { }; this.dbLogger = new SATPLogger(satpLoggerConfig); - this.logger.debug(`SATPManager dbLogger initialized: ${!!this.dbLogger}`); + this.logger.debug(`${fnTag} dbLogger initialized: ${!!this.dbLogger}`); const serviceClasses = [ Stage0ServerService as unknown as SATPServiceInstance, diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/logging.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/logging.ts index 9f024f015c..d7dcf6f65e 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/logging.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/logging.ts @@ -67,7 +67,7 @@ export class SATPLogger { logEntry.operation, ); const localLog: LocalLog = { - sessionID: logEntry.sessionID, + sessionId: logEntry.sessionID, type: logEntry.type, key: key, timestamp: Date.now().toString(), @@ -96,7 +96,7 @@ export class SATPLogger { logEntry.operation, ); const localLog: LocalLog = { - sessionID: logEntry.sessionID, + sessionId: logEntry.sessionID, type: logEntry.type, key: key, timestamp: Date.now().toString(), @@ -116,7 +116,7 @@ export class SATPLogger { private getHash(logEntry: LocalLog): string { const fnTag = `SATPLogger#getHash()`; this.log.debug( - `${fnTag} - generating hash for log entry with sessionID: ${logEntry.sessionID}`, + `${fnTag} - generating hash for log entry with sessionID: ${logEntry.sessionId}`, ); return SHA256( diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/plugin-satp-hermes-gateway.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/plugin-satp-hermes-gateway.ts index dcc3f21970..16827b8d40 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/plugin-satp-hermes-gateway.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/plugin-satp-hermes-gateway.ts @@ -57,9 +57,16 @@ import { SATPBridgesManager, } from "./gol/satp-bridges-manager"; import bodyParser from "body-parser"; +import { + CrashManager, + ICrashRecoveryManagerOptions, +} from "./gol/crash-manager"; import cors from "cors"; import * as OAS from "../json/openapi-blo-bundled.json"; +import knex, { Knex } from "knex"; +import { knexLocalInstance } from "../../knex/knexfile"; +import { knexRemoteInstance } from "../../knex/knexfile-remote"; export class SATPGateway implements IPluginWebService, ICactusPlugin { // todo more checks; example port from config is between 3000 and 9000 @@ -97,6 +104,9 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { public localRepository?: ILocalLogRepository; public remoteRepository?: IRemoteLogRepository; private readonly shutdownHooks: ShutdownHook[]; + private crashManager?: CrashManager; + public knexLocalConnection?: Knex; + public knexRemoteConnection?: Knex; constructor(public readonly options: SATPGatewayConfig) { const fnTag = `${this.className}#constructor()`; 
@@ -110,9 +120,10 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { }; this.logger = LoggerProvider.getOrCreate(logOptions); this.logger.info("Initializing Gateway Coordinator"); - - this.localRepository = new LocalLogRepository(options.knexLocalConfig); - this.remoteRepository = new RemoteLogRepository(options.knexRemoteConfig); + this.localRepository = new LocalLogRepository(this.config.knexLocalConfig); + this.remoteRepository = new RemoteLogRepository( + this.config.knexRemoteConfig, + ); if (this.config.keyPair == undefined) { throw new Error("Key pair is undefined"); @@ -181,6 +192,26 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { this.OAPIServerEnabled = this.config.enableOpenAPI ?? true; this.OAS = OAS; + + if (this.config.enableCrashManager) { + const crashOptions: ICrashRecoveryManagerOptions = { + instanceId: this.instanceId, + logLevel: this.config.logLevel, + bridgeConfig: this.bridgesManager, + orchestrator: this.gatewayOrchestrator, + localRepository: this.localRepository, + remoteRepository: this.remoteRepository, + signer: this.signer, + pubKey: this.pubKey, + }; + this.crashManager = new CrashManager(crashOptions); + this.logger.info("CrashManager has been initialized."); + } + + if (this.config.enableMigration) { + this.knexLocalConnection = knex(knexLocalInstance.default); + this.knexRemoteConnection = knex(knexRemoteInstance.default); + } } /* ICactus Plugin methods */ @@ -383,6 +414,32 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { pluginOptions.bridgesConfig = []; } + if (pluginOptions.enableMigration) { + if (!pluginOptions.knexLocalConfig) { + pluginOptions.knexLocalConfig = knexLocalInstance.default; + } else { + throw new Error("Multiple knexLocalConfigs!"); + } + + if (!pluginOptions.knexRemoteConfig) { + pluginOptions.knexRemoteConfig = knexRemoteInstance.default; + } else { + throw new Error("Multiple knexRemoteConfig!"); + } + } else { + if (!pluginOptions.knexLocalConfig) { + throw new Error("knexLocalConfig missing!"); + } + + if (!pluginOptions.knexRemoteConfig) { + throw new Error("knexRemoteConfig missing!"); + } + } + + if (!pluginOptions.enableCrashManager) { + pluginOptions.enableCrashManager = false; + } + return pluginOptions; } @@ -396,6 +453,20 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { const fnTag = `${this.className}#startup()`; this.logger.trace(`Entering ${fnTag}`); + if (this.config.enableMigration) { + try { + this.logger.info("Running database migrations..."); + await this.knexLocalConnection?.migrate.latest(); + await this.knexRemoteConnection?.migrate.latest(); + this.logger.info("Database migrations completed successfully."); + } catch (error) { + this.logger.error(`Failed to run database migrations: ${error}`); + throw error; + } + } else { + this.logger.info("Migration is disabled! 
Skipping database migrations."); + } + await Promise.all([this.startupBLOServer()]); await Promise.all([this.startupGOLServer()]); @@ -561,6 +632,18 @@ export class SATPGateway implements IPluginWebService, ICactusPlugin { this.logger.info(`Closed ${connectionsClosed} connections`); this.logger.info("Gateway Coordinator shut down"); + if (this.knexLocalConnection) { + this.logger.debug("Destroying local knex instance..."); + await this.knexLocalConnection.destroy(); + this.logger.debug("Local knex instance destroyed"); + } + + if (this.knexRemoteConnection) { + this.logger.debug("Destroying remote knex instance..."); + await this.knexRemoteConnection.destroy(); + this.logger.debug("Remote knex instance destroyed"); + } + if (this.localRepository) { this.logger.debug("Destroying local repository..."); await this.localRepository.destroy(); diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/repository/knex-local-log-repository.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/repository/knex-local-log-repository.ts index 8b5c454db8..43ba019cf9 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/repository/knex-local-log-repository.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/repository/knex-local-log-repository.ts @@ -59,7 +59,7 @@ export class KnexLocalLogRepository implements ILocalLogRepository { sequenceNumber: number, ): Promise { return this.getLogsTable() - .where("sessionID", sessionId) + .where("sessionId", sessionId) .andWhere("sequenceNumber", ">", sequenceNumber); } diff --git a/packages/cactus-plugin-satp-hermes/src/main/typescript/types/satp-protocol.ts b/packages/cactus-plugin-satp-hermes/src/main/typescript/types/satp-protocol.ts index 624a7326c1..c0b5ad025e 100644 --- a/packages/cactus-plugin-satp-hermes/src/main/typescript/types/satp-protocol.ts +++ b/packages/cactus-plugin-satp-hermes/src/main/typescript/types/satp-protocol.ts @@ -12,6 +12,7 @@ import { Stage0SATPHandler } from "../core/stage-handlers/stage0-handler"; import { Stage1SATPHandler } from "../core/stage-handlers/stage1-handler"; import { Stage2SATPHandler } from "../core/stage-handlers/stage2-handler"; import { Stage3SATPHandler } from "../core/stage-handlers/stage3-handler"; +import { CrashRecoveryHandler } from "../core/crash-management/crash-handler"; /** * Represents a handler for various stages of the SATP (Secure Asset Transfer Protocol). 
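The knex wiring added above gives the gateway two mutually exclusive database modes, which the `ProcessGatewayCoordinatorConfig` checks enforce: with `enableMigration` set, the bundled knexfile defaults are used and explicit knex configs are rejected ("Multiple knexLocalConfigs!"); without it, both `knexLocalConfig` and `knexRemoteConfig` must be supplied. A short configuration sketch follows; it is illustrative only, the package entry point and connection values are assumptions, and unrelated gateway options (identity, key pair, counterparty list) are omitted.

```typescript
// Hypothetical SATPGatewayConfig snippets for the two database modes (values are placeholders).
import type { Knex } from "knex";
import type { SATPGatewayConfig } from "@hyperledger/cactus-plugin-satp-hermes"; // assumed entry point

// Mode 1: let the gateway run migrations against the bundled knexfiles.
// Passing knexLocalConfig/knexRemoteConfig alongside enableMigration would throw.
export const withMigration: SATPGatewayConfig = {
  logLevel: "DEBUG",
  enableMigration: true,
  enableCrashManager: true,
  // gid, keyPair, counterPartyGateways, ... added as in the tests below
};

// Mode 2: bring your own database; both knex configs are then mandatory.
const localDb: Knex.Config = {
  client: "sqlite3",
  connection: { filename: "./satp-local-logs.sqlite3" },
  useNullAsDefault: true,
};
const remoteDb: Knex.Config = {
  client: "sqlite3",
  connection: { filename: "./satp-remote-logs.sqlite3" },
  useNullAsDefault: true,
};
export const withExternalDb: SATPGatewayConfig = {
  logLevel: "DEBUG",
  enableCrashManager: true,
  knexLocalConfig: localDb,
  knexRemoteConfig: remoteDb,
};
```

The recovery tests added below exercise both paths: gateway1 is configured with explicit knex connections, while gateway2 sets `enableMigration` and relies on the bundled defaults.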
@@ -24,6 +25,7 @@ export enum SATPHandlerType { STAGE1 = "stage-1-handler", STAGE2 = "stage-2-handler", STAGE3 = "stage-3-handler", + CRASH = "crash-handler", } export enum Stage { @@ -52,7 +54,8 @@ export type SATPHandlerInstance = | (typeof Stage0SATPHandler & ISATPHandler) | (typeof Stage1SATPHandler & ISATPHandler) | (typeof Stage2SATPHandler & ISATPHandler) - | (typeof Stage3SATPHandler & ISATPHandler); + | (typeof Stage3SATPHandler & ISATPHandler) + | (typeof CrashRecoveryHandler & ISATPHandler); export interface SATPHandler { setupRouter(router: ConnectRouter): void; diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-1.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-1.test.ts new file mode 100644 index 0000000000..c287ffc24c --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-1.test.ts @@ -0,0 +1,381 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + MessageStagesSignaturesSchema, + MessageStagesTimestampsSchema, + Stage0HashesSchema, + Stage0SignaturesSchema, + Stage0TimestampsSchema, + Stage1HashesSchema, + Stage1SignaturesSchema, + Stage1TimestampsSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { Knex, knex } from "knex"; + +let knexInstanceClient: Knex; +let knexInstanceRemote: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +let crashManager1: CrashManager; +let crashManager2: CrashManager; + +/** + * Creates a mock SATPSession: + * - Stage 0 always complete. + * - Stage 1 partial if client; complete if server. + */ +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const sessionId = uuidv4(); + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const baseTime = new Date(); + const incrementTime = (minutes: number): string => { + baseTime.setMinutes(baseTime.getMinutes() + minutes); + return baseTime.toISOString(); + }; + const sessionData = isClient + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: isClient + ? create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h1", + }) + : create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h1", + transferProposalReceiptMessageHash: "h2", + }), + }); + + sessionData.processedTimestamps = create(MessageStagesTimestampsSchema, { + stage0: create(Stage0TimestampsSchema, { + newSessionRequestMessageTimestamp: incrementTime(0), + newSessionResponseMessageTimestamp: incrementTime(1), + preSatpTransferRequestMessageTimestamp: incrementTime(2), + preSatpTransferResponseMessageTimestamp: incrementTime(3), + }), + stage1: isClient + ? create(Stage1TimestampsSchema, { + transferProposalRequestMessageTimestamp: incrementTime(5), + }) + : create(Stage1TimestampsSchema, { + transferProposalRequestMessageTimestamp: incrementTime(5), + transferProposalReceiptMessageTimestamp: incrementTime(6), + }), + }); + + sessionData.signatures = create(MessageStagesSignaturesSchema, { + stage0: create(Stage0SignaturesSchema, { + newSessionRequestMessageSignature: "sig_h1", + newSessionResponseMessageSignature: "sig_h2", + preSatpTransferRequestMessageSignature: "sig_h3", + preSatpTransferResponseMessageSignature: "sig_h4", + }), + stage1: isClient + ? 
create(Stage1SignaturesSchema, { + transferProposalRequestMessageSignature: "sig_h1", + }) + : create(Stage1SignaturesSchema, { + transferProposalRequestMessageSignature: "sig_h1", + transferProposalReceiptMessageSignature: "sig_h2", + }), + }); + + sessionData.senderAsset = create(AssetSchema, { + tokenId: "MOCK_TOKEN_ID", + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + + sessionData.receiverAsset = create(AssetSchema, { + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + + return mockSession; +}; + +beforeAll(async () => { + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3006, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3228, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote = knex(knexSourceRemoteConnection); + await knexInstanceRemote.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + enableMigration: true, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); +}); + +afterEach(async () => { + jest.clearAllMocks(); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + 
if (knexInstanceClient || knexInstanceRemote) { + await knexInstanceClient.destroy(); + await knexInstanceRemote.destroy(); + } +}); + +describe("Stage 1 Recovery Test", () => { + it("should recover Stage 1 hashes, timestamps, signatures, and update session state to RECOVERED", async () => { + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + expect(crashManager2).toBeInstanceOf(CrashManager); + + const clientSession = createMockSession("5000", "3", true); + const clientSessionData = clientSession.getClientSessionData(); + const sessionId = clientSessionData.id; + + const clientLogKey = getSatpLogKey(sessionId, "stage1", "partial"); + const clientLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: clientLogKey, + operation: "partial", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockClientLogRepo = crashManager1["localRepository"]; + await mockClientLogRepo.create(clientLogEntry); + + const serverSession = createMockSession("5000", "3", false); + const serverSessionData = serverSession.getServerSessionData(); + + serverSessionData.id = sessionId; + + const serverLogKey = getSatpLogKey(sessionId, "stage1", "done"); + const serverLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: serverLogKey, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockServerLogRepo = crashManager2["localRepository"]; + await mockServerLogRepo.create(serverLogEntry); + + await crashManager1.recoverSessions(); + await crashManager2.recoverSessions(); + + await new Promise((resolve) => setTimeout(resolve, 5000)); + + const updatedSessionClient = crashManager1["sessions"].get(sessionId); + const updatedSessionDataClient = + updatedSessionClient?.getClientSessionData(); + + const updatedSessionServer = crashManager2["sessions"].get(sessionId); + const updatedSessionDataServer = + updatedSessionServer?.getServerSessionData(); + + expect(updatedSessionDataClient).toBeDefined(); + expect(updatedSessionDataClient?.state).toBe(State.RECOVERED); + + expect(updatedSessionDataClient?.hashes?.stage1).toEqual( + updatedSessionDataServer?.hashes?.stage1, + ); + + expect( + updatedSessionDataClient?.hashes?.stage1 + ?.transferProposalRequestMessageHash, + ).toBe("h1"); + expect( + updatedSessionDataClient?.signatures?.stage1 + ?.transferProposalRequestMessageSignature, + ).toBe("sig_h1"); + + expect( + updatedSessionDataClient?.hashes?.stage1 + ?.transferProposalReceiptMessageHash, + ).toBe("h2"); + expect( + updatedSessionDataClient?.signatures?.stage1 + ?.transferProposalReceiptMessageSignature, + ).toBe("sig_h2"); + + expect(updatedSessionDataClient?.processedTimestamps?.stage1).toEqual( + updatedSessionDataServer?.processedTimestamps?.stage1, + ); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-2.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-2.test.ts new file mode 100644 index 0000000000..f43541aa34 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-2.test.ts @@ -0,0 +1,392 @@ +import "jest-extended"; +import { Secp256k1Keys } 
from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + MessageStagesSignaturesSchema, + MessageStagesTimestampsSchema, + Stage0HashesSchema, + Stage0SignaturesSchema, + Stage0TimestampsSchema, + Stage1HashesSchema, + Stage1SignaturesSchema, + Stage1TimestampsSchema, + Stage2HashesSchema, + Stage2SignaturesSchema, + Stage2TimestampsSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { Knex, knex } from "knex"; + +let knexInstanceClient: Knex; +let knexInstanceRemote: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +let crashManager1: CrashManager; +let crashManager2: CrashManager; + +/** + * Creates a mock SATPSession: + * - Stage 0, 1 are always complete. + * - Stage 2 partial if client; complete if server. + */ +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const sessionId = uuidv4(); + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const baseTime = new Date(); + const incrementTime = (minutes: number): string => { + baseTime.setMinutes(baseTime.getMinutes() + minutes); + return baseTime.toISOString(); + }; + + const sessionData = isClient + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + transferProposalReceiptMessageHash: "h6", + transferProposalRejectMessageHash: "h7", + transferCommenceRequestMessageHash: "h8", + transferCommenceResponseMessageHash: "h9", + }), + stage2: isClient + ? create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + }) + : create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + lockAssertionReceiptMessageHash: "h11", + }), + }); + + sessionData.processedTimestamps = create(MessageStagesTimestampsSchema, { + stage0: create(Stage0TimestampsSchema, { + newSessionRequestMessageTimestamp: incrementTime(0), + newSessionResponseMessageTimestamp: incrementTime(1), + preSatpTransferRequestMessageTimestamp: incrementTime(2), + preSatpTransferResponseMessageTimestamp: incrementTime(3), + }), + stage1: create(Stage1TimestampsSchema, { + transferProposalRequestMessageTimestamp: incrementTime(4), + transferProposalReceiptMessageTimestamp: incrementTime(5), + transferProposalRejectMessageTimestamp: incrementTime(6), + transferCommenceRequestMessageTimestamp: incrementTime(7), + transferCommenceResponseMessageTimestamp: incrementTime(8), + }), + stage2: isClient + ? 
create(Stage2TimestampsSchema, { + lockAssertionRequestMessageTimestamp: incrementTime(9), + }) + : create(Stage2TimestampsSchema, { + lockAssertionRequestMessageTimestamp: incrementTime(9), + lockAssertionReceiptMessageTimestamp: incrementTime(10), + }), + }); + + sessionData.signatures = create(MessageStagesSignaturesSchema, { + stage0: create(Stage0SignaturesSchema, { + newSessionRequestMessageSignature: "sig_h1", + newSessionResponseMessageSignature: "sig_h2", + preSatpTransferRequestMessageSignature: "sig_h3", + preSatpTransferResponseMessageSignature: "sig_h4", + }), + stage1: create(Stage1SignaturesSchema, { + transferProposalRequestMessageSignature: "sig_h5", + transferProposalReceiptMessageSignature: "sig_h6", + transferProposalRejectMessageSignature: "sig_h7", + transferCommenceRequestMessageSignature: "sig_h8", + transferCommenceResponseMessageSignature: "sig_h9", + }), + stage2: isClient + ? create(Stage2SignaturesSchema, { + lockAssertionRequestMessageSignature: "sig_h10", + }) + : create(Stage2SignaturesSchema, { + lockAssertionRequestMessageSignature: "sig_h10", + lockAssertionReceiptMessageSignature: "sig_h11", + }), + }); + + sessionData.senderAsset = create(AssetSchema, { + tokenId: "MOCK_TOKEN_ID", + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + sessionData.receiverAsset = create(AssetSchema, { + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + + return mockSession; +}; + +beforeAll(async () => { + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [{ Core: "v02", Architecture: "v02", Crash: "v02" }], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3006, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [{ Core: "v02", Architecture: "v02", Crash: "v02" }], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3228, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote = knex(knexSourceRemoteConnection); + await knexInstanceRemote.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + 
counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + enableMigration: true, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); +}); + +afterEach(async () => { + jest.clearAllMocks(); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + if (knexInstanceClient || knexInstanceRemote) { + await knexInstanceClient.destroy(); + await knexInstanceRemote.destroy(); + } +}); + +describe("Stage 2 Recovery Test", () => { + it("should recover Stage 2 hashes and timestamps and update session state to RECOVERED", async () => { + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + expect(crashManager2).toBeInstanceOf(CrashManager); + + const clientSession = createMockSession("5000", "3", true); + const clientSessionData = clientSession.getClientSessionData(); + const sessionId = clientSessionData.id; + + const clientLogKey = getSatpLogKey(sessionId, "stage1", "partial"); + const clientLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: clientLogKey, + operation: "partial", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockClientLogRepo = crashManager1["localRepository"]; + await mockClientLogRepo.create(clientLogEntry); + + const serverSession = createMockSession("5000", "3", false); + const serverSessionData = serverSession.getServerSessionData(); + + serverSessionData.id = sessionId; + + const serverLogKey = getSatpLogKey(sessionId, "stage1", "done"); + const serverLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: serverLogKey, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockServerLogRepo = crashManager2["localRepository"]; + await mockServerLogRepo.create(serverLogEntry); + + await crashManager1.recoverSessions(); + await crashManager2.recoverSessions(); + + await new Promise((resolve) => setTimeout(resolve, 5000)); + + const updatedSessionClient = crashManager1["sessions"].get(sessionId); + const updatedSessionDataClient = + updatedSessionClient?.getClientSessionData(); + const updatedSessionServer = crashManager2["sessions"].get(sessionId); + const updatedSessionDataServer = + updatedSessionServer?.getServerSessionData(); + + expect(updatedSessionDataClient).toBeDefined(); + expect(updatedSessionDataClient?.state).toBe(State.RECOVERED); + + expect(updatedSessionDataClient?.hashes?.stage2).toEqual( + updatedSessionDataServer?.hashes?.stage2, + ); + + expect(updatedSessionDataClient?.processedTimestamps?.stage2).toEqual( + updatedSessionDataServer?.processedTimestamps?.stage2, + ); + + 
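// Signatures recovered from the server-side log should now be present on the client session as well +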
expect(updatedSessionDataClient?.signatures?.stage2).toEqual( + updatedSessionDataServer?.signatures?.stage2, + ); + + expect( + updatedSessionDataClient?.hashes?.stage2?.lockAssertionReceiptMessageHash, + ).toBe("h11"); + expect( + updatedSessionDataClient?.processedTimestamps?.stage2 + ?.lockAssertionReceiptMessageTimestamp, + ).toBeDefined(); + expect( + updatedSessionDataClient?.signatures?.stage2 + ?.lockAssertionReceiptMessageSignature, + ).toBe("sig_h11"); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-3.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-3.test.ts new file mode 100644 index 0000000000..d2522edb83 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/recovery/recovery-stage-3.test.ts @@ -0,0 +1,436 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + MessageStagesSignaturesSchema, + MessageStagesTimestampsSchema, + Stage0HashesSchema, + Stage0SignaturesSchema, + Stage0TimestampsSchema, + Stage1HashesSchema, + Stage1SignaturesSchema, + Stage1TimestampsSchema, + Stage2HashesSchema, + Stage2SignaturesSchema, + Stage2TimestampsSchema, + Stage3HashesSchema, + Stage3SignaturesSchema, + Stage3TimestampsSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { Knex, knex } from "knex"; + +let knexInstanceClient: Knex; +let knexInstanceRemote: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +let crashManager1: CrashManager; +let crashManager2: CrashManager; + +/** + * Creates a mock SATPSession: + * - Stage 0, 1, 2 are always complete. + * - Stage 3: partial if client, complete if server. 
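+ * This asymmetry is what the Stage 3 recovery test relies on to sync the missing server-side artifacts.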
+ */ +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const sessionId = uuidv4(); + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const baseTime = new Date(); + const incrementTime = (minutes: number): string => { + baseTime.setMinutes(baseTime.getMinutes() + minutes); + return baseTime.toISOString(); + }; + + const sessionData = isClient + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + transferProposalReceiptMessageHash: "h6", + transferProposalRejectMessageHash: "h7", + transferCommenceRequestMessageHash: "h8", + transferCommenceResponseMessageHash: "h9", + }), + stage2: create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + lockAssertionReceiptMessageHash: "h11", + }), + stage3: isClient + ? 
create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h12", + }) + : create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h12", + commitReadyResponseMessageHash: "h13", + }), + }); + + sessionData.processedTimestamps = create(MessageStagesTimestampsSchema, { + stage0: create(Stage0TimestampsSchema, { + newSessionRequestMessageTimestamp: incrementTime(0), + newSessionResponseMessageTimestamp: incrementTime(1), + preSatpTransferRequestMessageTimestamp: incrementTime(2), + preSatpTransferResponseMessageTimestamp: incrementTime(3), + }), + stage1: create(Stage1TimestampsSchema, { + transferProposalRequestMessageTimestamp: incrementTime(4), + transferProposalReceiptMessageTimestamp: incrementTime(5), + transferProposalRejectMessageTimestamp: incrementTime(6), + transferCommenceRequestMessageTimestamp: incrementTime(7), + transferCommenceResponseMessageTimestamp: incrementTime(8), + }), + stage2: create(Stage2TimestampsSchema, { + lockAssertionRequestMessageTimestamp: incrementTime(9), + lockAssertionReceiptMessageTimestamp: incrementTime(10), + }), + stage3: isClient + ? create(Stage3TimestampsSchema, { + commitPreparationRequestMessageTimestamp: incrementTime(11), + }) + : create(Stage3TimestampsSchema, { + commitPreparationRequestMessageTimestamp: incrementTime(11), + commitReadyResponseMessageTimestamp: incrementTime(12), + }), + }); + + sessionData.signatures = create(MessageStagesSignaturesSchema, { + stage0: create(Stage0SignaturesSchema, { + newSessionRequestMessageSignature: "sig_h1", + newSessionResponseMessageSignature: "sig_h2", + preSatpTransferRequestMessageSignature: "sig_h3", + preSatpTransferResponseMessageSignature: "sig_h4", + }), + stage1: create(Stage1SignaturesSchema, { + transferProposalRequestMessageSignature: "sig_h5", + transferProposalReceiptMessageSignature: "sig_h6", + transferProposalRejectMessageSignature: "sig_h7", + transferCommenceRequestMessageSignature: "sig_h8", + transferCommenceResponseMessageSignature: "sig_h9", + }), + stage2: create(Stage2SignaturesSchema, { + lockAssertionRequestMessageSignature: "sig_h10", + lockAssertionReceiptMessageSignature: "sig_h11", + }), + stage3: isClient + ? 
create(Stage3SignaturesSchema, { + commitPreparationRequestMessageSignature: "sig_h12", + }) + : create(Stage3SignaturesSchema, { + commitPreparationRequestMessageSignature: "sig_h12", + commitReadyResponseMessageSignature: "sig_h13", + }), + }); + + sessionData.senderAsset = create(AssetSchema, { + tokenId: "MOCK_TOKEN_ID", + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + + sessionData.receiverAsset = create(AssetSchema, { + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + + return mockSession; +}; + +beforeAll(async () => { + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [{ Core: "v02", Architecture: "v02", Crash: "v02" }], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3006, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [{ Core: "v02", Architecture: "v02", Crash: "v02" }], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3228, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote = knex(knexSourceRemoteConnection); + await knexInstanceRemote.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + enableMigration: true, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); +}); + +afterEach(async () => { + jest.clearAllMocks(); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + if (knexInstanceClient || 
knexInstanceRemote) { + await knexInstanceClient.destroy(); + await knexInstanceRemote.destroy(); + } +}); + +describe("Stage 3 Recovery Test", () => { + it("should recover Stage 3 hashes and timestamps and update session state to RECOVERED", async () => { + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + expect(crashManager2).toBeInstanceOf(CrashManager); + + const clientSession = createMockSession("5000", "3", true); + const clientSessionData = clientSession.getClientSessionData(); + const sessionId = clientSessionData.id; + + const clientLogKey = getSatpLogKey(sessionId, "stage1", "partial"); + const clientLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: clientLogKey, + operation: "partial", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockClientLogRepo = crashManager1["localRepository"]; + await mockClientLogRepo.create(clientLogEntry); + + const serverSession = createMockSession("5000", "3", false); + const serverSessionData = serverSession.getServerSessionData(); + + serverSessionData.id = sessionId; + + const serverLogKey = getSatpLogKey(sessionId, "stage1", "done"); + const serverLogEntry: LocalLog = { + sessionId: sessionId, + type: "stage1", + key: serverLogKey, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockServerLogRepo = crashManager2["localRepository"]; + await mockServerLogRepo.create(serverLogEntry); + + await crashManager1.recoverSessions(); + await crashManager2.recoverSessions(); + + await new Promise((resolve) => setTimeout(resolve, 5000)); + const updatedSessionClient = crashManager1["sessions"].get(sessionId); + const updatedSessionDataClient = + updatedSessionClient?.getClientSessionData(); + + const updatedSessionServer = crashManager2["sessions"].get(sessionId); + const updatedSessionDataServer = + updatedSessionServer?.getServerSessionData(); + + expect(updatedSessionDataClient).toBeDefined(); + expect(updatedSessionDataClient?.state).toBe(State.RECOVERED); + + expect(updatedSessionDataClient?.hashes?.stage3).toEqual( + updatedSessionDataServer?.hashes?.stage3, + ); + + expect( + updatedSessionDataClient?.hashes?.stage3 + ?.commitPreparationRequestMessageHash, + ).toBe("h12"); + + expect( + updatedSessionDataClient?.hashes?.stage3?.commitReadyResponseMessageHash, + ).toBe("h13"); + + expect(updatedSessionDataClient?.processedTimestamps?.stage3).toEqual( + updatedSessionDataServer?.processedTimestamps?.stage3, + ); + + expect( + updatedSessionDataClient?.processedTimestamps?.stage3 + ?.commitPreparationRequestMessageTimestamp, + ).toBeDefined(); + + expect( + updatedSessionDataClient?.processedTimestamps?.stage3 + ?.commitReadyResponseMessageTimestamp, + ).toBeDefined(); + + expect(updatedSessionDataClient?.signatures?.stage3).toEqual( + updatedSessionDataServer?.signatures?.stage3, + ); + + expect( + updatedSessionDataClient?.signatures?.stage3 + ?.commitPreparationRequestMessageSignature, + ).toBe("sig_h12"); + + expect( + updatedSessionDataClient?.signatures?.stage3 + ?.commitReadyResponseMessageSignature, + ).toBe("sig_h13"); + + expect(updatedSessionDataClient?.hashes?.stage3).toEqual( + updatedSessionDataServer?.hashes?.stage3, + ); + 
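// Re-check the full Stage 3 structures after the field-level assertions above +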
expect(updatedSessionDataClient?.processedTimestamps?.stage3).toEqual( + updatedSessionDataServer?.processedTimestamps?.stage3, + ); + expect(updatedSessionDataClient?.signatures?.stage3).toEqual( + updatedSessionDataServer?.signatures?.stage3, + ); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-0.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-0.test.ts new file mode 100644 index 0000000000..8916686578 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-0.test.ts @@ -0,0 +1,426 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + pruneDockerAllIfGithubAction, + Containers, +} from "@hyperledger/cactus-test-tooling"; +import { BesuTestEnvironment, FabricTestEnvironment } from "../../test-utils"; +import { + AssetSchema, + ClaimFormat, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { + knexClientConnection, + knexSourceRemoteConnection, + knexTargetRemoteConnection, + knexServerConnection, +} from "../../knex.config"; +import { LogLevelDesc, LoggerProvider } from "@hyperledger/cactus-common"; +import { Knex, knex } from "knex"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + Stage0HashesSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import SATPInteractionFabric from "../../fabric/satp-erc20-interact.json"; +import SATPInteractionBesu from "../../../solidity/satp-erc20-interact.json"; +import { EvmAsset } from "../../../../main/typescript/core/stage-services/satp-bridge/types/evm-asset"; +import { FabricAsset } from "../../../../main/typescript/core/stage-services/satp-bridge/types/fabric-asset"; +import { SATPBridgesManager } from "../../../../main/typescript/gol/satp-bridges-manager"; + +let fabricEnv: FabricTestEnvironment; +let besuEnv: BesuTestEnvironment; +let knexInstanceClient: Knex; +let knexInstanceServer: Knex; +let knexInstanceRemote1: Knex; +let knexInstanceRemote2: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; +const bridge_id = + "x509::/OU=org2/OU=client/OU=department1/CN=bridge::/C=UK/ST=Hampshire/L=Hursley/O=org2.example.com/CN=ca.org2.example.com"; + +let crashManager1: CrashManager; +let crashManager2: CrashManager; +let bridgesManager: SATPBridgesManager; +const sessionId = uuidv4(); +const keyPairs = 
Secp256k1Keys.generateKeyPairsBuffer(); +const logLevel: LogLevelDesc = "DEBUG"; +const log = LoggerProvider.getOrCreate({ + level: logLevel, + label: "Rollback-stage-0", +}); +const FABRIC_ASSET_ID = uuidv4(); +const BESU_ASSET_ID = uuidv4(); + +// mock stage-0 rollback +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const sessionData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: isClient + ? 
create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + }) + : create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + }), + }); + if (isClient) { + sessionData.senderAsset = create(AssetSchema, { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + } + if (!isClient) { + sessionData.receiverAsset = create(AssetSchema, { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + } + + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + + return mockSession; +}; + +beforeAll(async () => { + pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); + + { + const satpContractName = "satp-contract"; + fabricEnv = await FabricTestEnvironment.setupTestEnvironment( + satpContractName, + bridge_id, + logLevel, + ); + log.info("Fabric Ledger started successfully"); + + await fabricEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + { + const erc20TokenContract = "SATPContract"; + const contractNameWrapper = "SATPWrapperContract"; + + besuEnv = await BesuTestEnvironment.setupTestEnvironment( + erc20TokenContract, + contractNameWrapper, + logLevel, + ); + log.info("Besu Ledger started successfully"); + + await besuEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + bridgesManager = new SATPBridgesManager({ + logLevel: "DEBUG", + networks: [besuEnv.besuConfig, fabricEnv.fabricConfig], + supportedDLTs: [SupportedChain.BESU, SupportedChain.FABRIC], + }); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + if ( + knexInstanceClient || + knexInstanceServer || + knexInstanceRemote1 || + knexInstanceRemote2 + ) { + await knexInstanceClient.destroy(); + await knexInstanceServer.destroy(); + await knexInstanceRemote1.destroy(); + await knexInstanceRemote2.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + await besuEnv.tearDown(); + await fabricEnv.tearDown(); + + await pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); +}); + +describe("Rollback Test stage 0", () => { + it("should initiate stage-0 rollback strategy", async () => { + const besuAsset: EvmAsset = { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: Number(100), + owner: besuEnv.firstHighNetWorthAccount, + contractName: besuEnv.erc20TokenContract, + contractAddress: besuEnv.assetContractAddress, + ontology: JSON.stringify(SATPInteractionBesu), + }; + const besuReceipt = await 
bridgesManager + .getBridge(SupportedChain.BESU) + .wrapAsset(besuAsset); + expect(besuReceipt).toBeDefined(); + log.info(`Besu Asset Wrapped: ${besuReceipt}`); + + const fabricAsset: FabricAsset = { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: Number(100), + owner: fabricEnv.clientId, + mspId: "Org1MSP", + channelName: fabricEnv.fabricChannelName, + contractName: fabricEnv.satpContractName, + ontology: JSON.stringify(SATPInteractionFabric), + }; + const fabricReceipt = await bridgesManager + .getBridge(SupportedChain.FABRIC) + .wrapAsset(fabricAsset); + expect(fabricReceipt).toBeDefined(); + log.info(`Fabric Asset Wrapped: ${fabricReceipt}`); + + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3005, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3225, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote1 = knex(knexSourceRemoteConnection); + await knexInstanceRemote1.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + bridgesConfig: [besuEnv.besuConfig], + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + knexInstanceServer = knex(knexServerConnection); + await knexInstanceServer.migrate.latest(); + + knexInstanceRemote2 = knex(knexTargetRemoteConnection); + await knexInstanceRemote2.migrate.latest(); + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + bridgesConfig: [fabricEnv.fabricConfig], + knexLocalConfig: knexServerConnection, + knexRemoteConfig: knexTargetRemoteConnection, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); + + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + + expect(crashManager2).toBeInstanceOf(CrashManager); + + const initiateRollbackSpy1 = jest.spyOn(crashManager1, "initiateRollback"); + + const clientSession = createMockSession("5000", "3", true); + const 
serverSession = createMockSession("5000", "3", false); + + const clientSessionData = clientSession.getClientSessionData(); + const serverSessionData = serverSession.getServerSessionData(); + + const key1 = getSatpLogKey(sessionId, "type", "operation1"); + const mockLogEntry1: LocalLog = { + sessionId: sessionId, + type: "type", + key: key1, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockLogRepository1 = crashManager1["localRepository"]; + await mockLogRepository1.create(mockLogEntry1); + + const key2 = getSatpLogKey(sessionId, "type2", "done"); + const mockLogEntry2: LocalLog = { + sessionId: sessionId, + type: "type2", + key: key2, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockLogRepository2 = crashManager2["localRepository"]; + await mockLogRepository2.create(mockLogEntry2); + + crashManager1.sessions.set(sessionId, clientSession); + crashManager2.sessions.set(sessionId, serverSession); + + const rollbackStatus = await crashManager1.initiateRollback( + clientSession, + true, + ); + expect(initiateRollbackSpy1).toHaveBeenCalled(); + expect(rollbackStatus).toBe(true); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-1.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-1.test.ts new file mode 100644 index 0000000000..34910e54de --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-1.test.ts @@ -0,0 +1,354 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + pruneDockerAllIfGithubAction, + Containers, +} from "@hyperledger/cactus-test-tooling"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { + knexClientConnection, + knexSourceRemoteConnection, + knexTargetRemoteConnection, + knexServerConnection, +} from "../../knex.config"; +import { LogLevelDesc, LoggerProvider } from "@hyperledger/cactus-common"; +import { Knex, knex } from "knex"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + Stage0HashesSchema, + Stage1HashesSchema, + State, +} from 
"../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; + +let knexInstanceClient: Knex; +let knexInstanceServer: Knex; +let knexInstanceRemote1: Knex; +let knexInstanceRemote2: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +let crashManager1: CrashManager; +let crashManager2: CrashManager; + +const sessionId = uuidv4(); +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +const logLevel: LogLevelDesc = "DEBUG"; +const log = LoggerProvider.getOrCreate({ + level: logLevel, + label: "Rollback-stage-1", +}); +const FABRIC_ASSET_ID = uuidv4(); +const BESU_ASSET_ID = uuidv4(); + +// mock stage-1 rollback +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const sessionData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: isClient + ? 
create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + }) + : create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + transferProposalReceiptMessageHash: "h6", + }), + }); + if (isClient) { + sessionData.senderAsset = create(AssetSchema, { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + } + if (!isClient) { + sessionData.receiverAsset = create(AssetSchema, { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + } + + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + + return mockSession; +}; + +beforeAll(async () => { + pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + if ( + knexInstanceClient || + knexInstanceServer || + knexInstanceRemote1 || + knexInstanceRemote2 + ) { + await knexInstanceClient.destroy(); + await knexInstanceServer.destroy(); + await knexInstanceRemote1.destroy(); + await knexInstanceRemote2.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + await pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); +}); + +describe("Rollback Test stage 1", () => { + it("should initiate stage-1 rollback strategy", async () => { + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3005, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3225, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + 
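// Apply migrations so the log tables exist before the gateways start writing recovery entries +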
await knexInstanceClient.migrate.latest(); + + knexInstanceRemote1 = knex(knexSourceRemoteConnection); + await knexInstanceRemote1.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + knexInstanceServer = knex(knexServerConnection); + await knexInstanceServer.migrate.latest(); + + knexInstanceRemote2 = knex(knexTargetRemoteConnection); + await knexInstanceRemote2.migrate.latest(); + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + knexLocalConfig: knexServerConnection, + knexRemoteConfig: knexTargetRemoteConnection, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); + + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + + expect(crashManager2).toBeInstanceOf(CrashManager); + + const initiateRollbackSpy1 = jest.spyOn(crashManager1, "initiateRollback"); + + const clientSession = createMockSession("5000", "3", true); + const serverSession = createMockSession("5000", "3", false); + + const clientSessionData = clientSession.getClientSessionData(); + const serverSessionData = serverSession.getServerSessionData(); + + const key1 = getSatpLogKey(sessionId, "type", "operation1"); + const mockLogEntry1: LocalLog = { + sessionId: sessionId, + type: "type", + key: key1, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockLogRepository1 = crashManager1["localRepository"]; + await mockLogRepository1.create(mockLogEntry1); + + const key2 = getSatpLogKey(sessionId, "type2", "done"); + const mockLogEntry2: LocalLog = { + sessionId: sessionId, + type: "type2", + key: key2, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockLogRepository2 = crashManager2["localRepository"]; + await mockLogRepository2.create(mockLogEntry2); + + crashManager1.sessions.set(sessionId, clientSession); + crashManager2.sessions.set(sessionId, serverSession); + + const rollbackStatus = await crashManager1.initiateRollback( + clientSession, + true, + ); + expect(initiateRollbackSpy1).toHaveBeenCalled(); + expect(rollbackStatus).toBe(true); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-2.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-2.test.ts new file mode 100644 index 0000000000..ff2d23aebd --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-2.test.ts @@ -0,0 +1,427 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + 
LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + pruneDockerAllIfGithubAction, + Containers, +} from "@hyperledger/cactus-test-tooling"; +import { BesuTestEnvironment, FabricTestEnvironment } from "../../test-utils"; +import { + AssetSchema, + ClaimFormat, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { + knexClientConnection, + knexSourceRemoteConnection, + knexTargetRemoteConnection, + knexServerConnection, +} from "../../knex.config"; +import { LogLevelDesc, LoggerProvider } from "@hyperledger/cactus-common"; +import { Knex, knex } from "knex"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + Stage0HashesSchema, + Stage1HashesSchema, + Stage2HashesSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { SATPBridgesManager } from "../../../../main/typescript/gol/satp-bridges-manager"; +import SATPInteractionBesu from "../../../solidity/satp-erc20-interact.json"; +import { EvmAsset } from "../../../../main/typescript/core/stage-services/satp-bridge/types/evm-asset"; + +let besuEnv: BesuTestEnvironment; +let fabricEnv: FabricTestEnvironment; +let knexInstanceClient: Knex; +let knexInstanceServer: Knex; +let knexInstanceRemote1: Knex; +let knexInstanceRemote2: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +let crashManager1: CrashManager; +let crashManager2: CrashManager; +let bridgesManager: SATPBridgesManager; +const sessionId = uuidv4(); +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +const logLevel: LogLevelDesc = "DEBUG"; +const log = LoggerProvider.getOrCreate({ + level: logLevel, + label: "Rollback-stage-2", +}); +const FABRIC_ASSET_ID = uuidv4(); +const BESU_ASSET_ID = uuidv4(); +const bridge_id = + "x509::/OU=org2/OU=client/OU=department1/CN=bridge::/C=UK/ST=Hampshire/L=Hursley/O=org2.example.com/CN=ca.org2.example.com"; + +// mock stage-2 rollback +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const sessionData = mockSession.hasClientSessionData() + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + transferProposalReceiptMessageHash: "h6", + transferProposalRejectMessageHash: "h7", + transferCommenceRequestMessageHash: "h8", + transferCommenceResponseMessageHash: "h9", + }), + stage2: isClient + ? 
create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + }) + : create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + }), + }); + if (isClient) { + sessionData.senderAsset = create(AssetSchema, { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + } + if (!isClient) { + sessionData.receiverAsset = create(AssetSchema, { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + } + + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + + return mockSession; +}; + +beforeAll(async () => { + pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); + { + const satpContractName = "satp-contract"; + fabricEnv = await FabricTestEnvironment.setupTestEnvironment( + satpContractName, + bridge_id, + logLevel, + ); + log.info("Fabric Ledger started successfully"); + + await fabricEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + { + const erc20TokenContract = "SATPContract"; + const contractNameWrapper = "SATPWrapperContract"; + + besuEnv = await BesuTestEnvironment.setupTestEnvironment( + erc20TokenContract, + contractNameWrapper, + logLevel, + ); + log.info("Besu Ledger started successfully"); + + await besuEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + bridgesManager = new SATPBridgesManager({ + logLevel: "DEBUG", + networks: [besuEnv.besuConfig, fabricEnv.fabricConfig], + supportedDLTs: [SupportedChain.BESU, SupportedChain.FABRIC], + }); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + if ( + knexInstanceClient || + knexInstanceServer || + knexInstanceRemote1 || + knexInstanceRemote2 + ) { + await knexInstanceClient.destroy(); + await knexInstanceServer.destroy(); + await knexInstanceRemote1.destroy(); + await knexInstanceRemote2.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + await besuEnv.tearDown(); + await fabricEnv.tearDown(); + + await pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); +}); + +describe("Rollback Test stage 2", () => { + it("should initiate stage-2 rollback strategy", async () => { + const besuAsset: EvmAsset = { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: Number(100), + owner: besuEnv.firstHighNetWorthAccount, + contractName: besuEnv.erc20TokenContract, + contractAddress: besuEnv.assetContractAddress, + ontology: JSON.stringify(SATPInteractionBesu), + }; + const besuReceipt = await bridgesManager + 
.getBridge(SupportedChain.BESU) + .wrapAsset(besuAsset); + expect(besuReceipt).toBeDefined(); + log.info(`Besu Asset Wrapped: ${besuReceipt}`); + + const besuReceipt1 = await bridgesManager + .getBridge(SupportedChain.BESU) + .lockAsset(BESU_ASSET_ID, 100); + expect(besuReceipt1).toBeDefined(); + log.info(`Besu Asset locked: ${besuReceipt1}`); + + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3005, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3225, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote1 = knex(knexSourceRemoteConnection); + await knexInstanceRemote1.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + bridgesConfig: [besuEnv.besuConfig], + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + knexInstanceServer = knex(knexServerConnection); + await knexInstanceServer.migrate.latest(); + + knexInstanceRemote2 = knex(knexTargetRemoteConnection); + await knexInstanceRemote2.migrate.latest(); + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + bridgesConfig: [fabricEnv.fabricConfig], + knexLocalConfig: knexServerConnection, + knexRemoteConfig: knexTargetRemoteConnection, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); + + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + + expect(crashManager2).toBeInstanceOf(CrashManager); + + const initiateRollbackSpy1 = jest.spyOn(crashManager1, "initiateRollback"); + + const clientSession = createMockSession("5000", "3", true); + const serverSession = createMockSession("5000", "3", false); + + const clientSessionData = clientSession.getClientSessionData(); + const serverSessionData = serverSession.getServerSessionData(); + + const key1 = getSatpLogKey(sessionId, "type", "operation1"); + const mockLogEntry1: LocalLog = { + sessionId: sessionId, + type: "type", + key: 
key1, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockLogRepository1 = crashManager1["localRepository"]; + await mockLogRepository1.create(mockLogEntry1); + + const key2 = getSatpLogKey(sessionId, "type2", "done"); + const mockLogEntry2: LocalLog = { + sessionId: sessionId, + type: "type2", + key: key2, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockLogRepository2 = crashManager2["localRepository"]; + await mockLogRepository2.create(mockLogEntry2); + + crashManager1.sessions.set(sessionId, clientSession); + crashManager2.sessions.set(sessionId, serverSession); + + const rollbackStatus = await crashManager1.initiateRollback( + clientSession, + true, + ); + expect(initiateRollbackSpy1).toHaveBeenCalled(); + expect(rollbackStatus).toBe(true); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-3.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-3.test.ts new file mode 100644 index 0000000000..06e023809d --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/integration/rollback/rollback-stage-3.test.ts @@ -0,0 +1,470 @@ +import "jest-extended"; +import { Secp256k1Keys } from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + pruneDockerAllIfGithubAction, + Containers, +} from "@hyperledger/cactus-test-tooling"; +import { BesuTestEnvironment, FabricTestEnvironment } from "../../test-utils"; +import { + AssetSchema, + ClaimFormat, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { getSatpLogKey } from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + SATPGatewayConfig, + PluginFactorySATPGateway, + SATPGateway, +} from "../../../../main/typescript"; +import { + IPluginFactoryOptions, + PluginImportType, +} from "@hyperledger/cactus-core-api"; +import { bufArray2HexStr } from "../../../../main/typescript/gateway-utils"; +import { + knexClientConnection, + knexSourceRemoteConnection, + knexTargetRemoteConnection, + knexServerConnection, +} from "../../knex.config"; +import { LogLevelDesc, LoggerProvider } from "@hyperledger/cactus-common"; +import { Knex, knex } from "knex"; +import { create } from "@bufbuild/protobuf"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import { + MessageStagesHashesSchema, + Stage0HashesSchema, + Stage1HashesSchema, + Stage2HashesSchema, + Stage3HashesSchema, + State, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { SATPBridgesManager } from "../../../../main/typescript/gol/satp-bridges-manager"; +import SATPInteractionFabric from "../../fabric/satp-erc20-interact.json"; +import { FabricAsset } 
from "../../../../main/typescript/core/stage-services/satp-bridge/types/fabric-asset"; +import { FabricContractInvocationType } from "@hyperledger/cactus-plugin-ledger-connector-fabric"; + +let besuEnv: BesuTestEnvironment; +let fabricEnv: FabricTestEnvironment; +let knexInstanceClient: Knex; +let knexInstanceServer: Knex; +let knexInstanceRemote1: Knex; +let knexInstanceRemote2: Knex; + +let gateway1: SATPGateway; +let gateway2: SATPGateway; + +let crashManager1: CrashManager; +let crashManager2: CrashManager; +let bridgesManager: SATPBridgesManager; +const sessionId = uuidv4(); +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +const logLevel: LogLevelDesc = "DEBUG"; +const log = LoggerProvider.getOrCreate({ + level: logLevel, + label: "Rollback-stage-3", +}); +const FABRIC_ASSET_ID = uuidv4(); +const BESU_ASSET_ID = uuidv4(); +const bridge_id = + "x509::/OU=org2/OU=client/OU=department1/CN=bridge::/C=UK/ST=Hampshire/L=Hursley/O=org2.example.com/CN=ca.org2.example.com"; + +// mock stage-3 rollback +const createMockSession = ( + maxTimeout: string, + maxRetries: string, + isClient: boolean, +): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: !isClient, + client: isClient, + }); + + const sessionData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.state = State.RECOVERING; + sessionData.lastSequenceNumber = isClient ? BigInt(1) : BigInt(2); + sessionData.hashes = create(MessageStagesHashesSchema, { + stage0: create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }), + stage1: create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h5", + transferProposalReceiptMessageHash: "h6", + transferProposalRejectMessageHash: "h7", + transferCommenceRequestMessageHash: "h8", + transferCommenceResponseMessageHash: "h9", + }), + stage2: create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h10", + lockAssertionReceiptMessageHash: "h11", + }), + stage3: isClient + ? 
create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h12", + }) + : create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h12", + commitReadyResponseMessageHash: "h13", + }), + }); + if (isClient) { + sessionData.senderAsset = create(AssetSchema, { + tokenId: BESU_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + } + if (!isClient) { + sessionData.receiverAsset = create(AssetSchema, { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + } + + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + + return mockSession; +}; + +beforeAll(async () => { + pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); + { + const satpContractName = "satp-contract"; + fabricEnv = await FabricTestEnvironment.setupTestEnvironment( + satpContractName, + bridge_id, + logLevel, + ); + log.info("Fabric Ledger started successfully"); + + await fabricEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + { + const erc20TokenContract = "SATPContract"; + const contractNameWrapper = "SATPWrapperContract"; + + besuEnv = await BesuTestEnvironment.setupTestEnvironment( + erc20TokenContract, + contractNameWrapper, + logLevel, + ); + log.info("Besu Ledger started successfully"); + + await besuEnv.deployAndSetupContracts(ClaimFormat.DEFAULT); + } + + bridgesManager = new SATPBridgesManager({ + logLevel: "DEBUG", + networks: [besuEnv.besuConfig, fabricEnv.fabricConfig], + supportedDLTs: [SupportedChain.BESU, SupportedChain.FABRIC], + }); +}); + +afterAll(async () => { + if (crashManager1 || crashManager2) { + crashManager1.stopScheduler(); + crashManager1.localRepository.destroy(); + crashManager1.remoteRepository.destroy(); + + crashManager2.stopScheduler(); + crashManager2.localRepository.destroy(); + crashManager2.remoteRepository.destroy(); + } + if ( + knexInstanceClient || + knexInstanceServer || + knexInstanceRemote1 || + knexInstanceRemote2 + ) { + await knexInstanceClient.destroy(); + await knexInstanceServer.destroy(); + await knexInstanceRemote1.destroy(); + await knexInstanceRemote2.destroy(); + } + + if (gateway1) { + await gateway1.shutdown(); + } + + if (gateway2) { + await gateway2.shutdown(); + } + + await besuEnv.tearDown(); + await fabricEnv.tearDown(); + + await pruneDockerAllIfGithubAction({ logLevel }) + .then(() => { + log.info("Pruning throw OK"); + }) + .catch(async () => { + await Containers.logDiagnostics({ logLevel }); + fail("Pruning didn't throw OK"); + }); +}); + +describe("Rollback Test stage 3", () => { + it("should initiate stage-3 rollback strategy", async () => { + const fabricAsset: FabricAsset = { + tokenId: FABRIC_ASSET_ID, + tokenType: TokenType.NONSTANDARD, + amount: Number(100), + owner: fabricEnv.clientId, + mspId: "Org1MSP", + channelName: fabricEnv.fabricChannelName, + contractName: fabricEnv.satpContractName, + ontology: JSON.stringify(SATPInteractionFabric), + }; + 
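+    // Pre-stage the Fabric ledger before exercising the stage-3 rollback:
+    // wrap the asset via the bridges manager, have the owner Mint and Approve
+    // 100 tokens for the bridge identity, then lock and mint through the
+    // bridge so the ledger holds state for the rollback strategy to revert.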
const fabricReceipt = await bridgesManager + .getBridge(SupportedChain.FABRIC) + .wrapAsset(fabricAsset); + expect(fabricReceipt).toBeDefined(); + log.info(`Fabric Asset Wrapped: ${fabricReceipt}`); + + const responseMint1 = await fabricEnv.apiClient.runTransactionV1({ + contractName: fabricEnv.satpContractName, + channelName: fabricEnv.fabricChannelName, + params: ["100"], + methodName: "Mint", + invocationType: FabricContractInvocationType.Send, + signingCredential: fabricEnv.fabricSigningCredential, + }); + expect(responseMint1).not.toBeUndefined(); + + log.info( + `Mint 100 amount asset by the owner response: ${JSON.stringify(responseMint1.data)}`, + ); + + const responseApprove = await fabricEnv.apiClient.runTransactionV1({ + contractName: fabricEnv.satpContractName, + channelName: fabricEnv.fabricChannelName, + params: [bridge_id, "100"], + methodName: "Approve", + invocationType: FabricContractInvocationType.Send, + signingCredential: fabricEnv.fabricSigningCredential, + }); + + expect(responseApprove).not.toBeUndefined(); + log.info( + `Approve 100 amount asset by the owner response: ${JSON.stringify(responseApprove.data)}`, + ); + + const responseLock = await bridgesManager + .getBridge(SupportedChain.FABRIC) + .lockAsset(FABRIC_ASSET_ID, 100); + + expect(responseLock).not.toBeUndefined(); + log.info(`Lock asset response: ${JSON.stringify(responseLock)}`); + + const responseMint = await bridgesManager + .getBridge(SupportedChain.FABRIC) + .mintAsset(FABRIC_ASSET_ID, 100); + + log.info(`Mint asset response: ${JSON.stringify(responseMint)}`); + + const factoryOptions: IPluginFactoryOptions = { + pluginImportType: PluginImportType.Local, + }; + const factory = new PluginFactorySATPGateway(factoryOptions); + + const gateway1KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + const gateway2KeyPair = Secp256k1Keys.generateKeyPairsBuffer(); + + const gatewayIdentity1: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway1", + pubKey: bufArray2HexStr(gateway1KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + gatewayServerPort: 3005, + gatewayClientPort: 3001, + gatewayOpenAPIPort: 3002, + }; + + const gatewayIdentity2: GatewayIdentity = { + id: "mockID-2", + name: "CustomGateway2", + pubKey: bufArray2HexStr(gateway2KeyPair.publicKey), + version: [ + { + Core: "v02", + Architecture: "v02", + Crash: "v02", + }, + ], + supportedDLTs: [SupportedChain.FABRIC], + proofID: "mockProofID11", + address: "http://localhost" as Address, + gatewayServerPort: 3225, + gatewayClientPort: 3211, + gatewayOpenAPIPort: 4210, + }; + + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote1 = knex(knexSourceRemoteConnection); + await knexInstanceRemote1.migrate.latest(); + + const options1: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity1, + counterPartyGateways: [gatewayIdentity2], + keyPair: gateway1KeyPair, + bridgesConfig: [besuEnv.besuConfig], + knexLocalConfig: knexClientConnection, + knexRemoteConfig: knexSourceRemoteConnection, + enableCrashManager: true, + }; + + knexInstanceServer = knex(knexServerConnection); + await knexInstanceServer.migrate.latest(); + + knexInstanceRemote2 = knex(knexTargetRemoteConnection); + await knexInstanceRemote2.migrate.latest(); + + const options2: SATPGatewayConfig = { + logLevel: "DEBUG", + gid: gatewayIdentity2, + 
counterPartyGateways: [gatewayIdentity1], + keyPair: gateway2KeyPair, + bridgesConfig: [fabricEnv.fabricConfig], + knexLocalConfig: knexServerConnection, + knexRemoteConfig: knexTargetRemoteConnection, + enableCrashManager: true, + }; + + gateway1 = (await factory.create(options1)) as SATPGateway; + expect(gateway1).toBeInstanceOf(SATPGateway); + await gateway1.startup(); + + gateway2 = (await factory.create(options2)) as SATPGateway; + expect(gateway2).toBeInstanceOf(SATPGateway); + await gateway2.startup(); + + crashManager1 = gateway1["crashManager"] as CrashManager; + expect(crashManager1).toBeInstanceOf(CrashManager); + + crashManager2 = gateway2["crashManager"] as CrashManager; + + expect(crashManager2).toBeInstanceOf(CrashManager); + + const initiateRollbackSpy1 = jest.spyOn(crashManager1, "initiateRollback"); + + const clientSession = createMockSession("5000", "3", true); + const serverSession = createMockSession("5000", "3", false); + + const clientSessionData = clientSession.getClientSessionData(); + const serverSessionData = serverSession.getServerSessionData(); + + const key1 = getSatpLogKey(sessionId, "type", "operation1"); + const mockLogEntry1: LocalLog = { + sessionId: sessionId, + type: "type", + key: key1, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(clientSessionData), + sequenceNumber: Number(clientSessionData.lastSequenceNumber), + }; + + const mockLogRepository1 = crashManager1["localRepository"]; + await mockLogRepository1.create(mockLogEntry1); + + const key2 = getSatpLogKey(sessionId, "type2", "done"); + const mockLogEntry2: LocalLog = { + sessionId: sessionId, + type: "type2", + key: key2, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockLogRepository2 = crashManager2["localRepository"]; + await mockLogRepository2.create(mockLogEntry2); + + crashManager1.sessions.set(sessionId, clientSession); + crashManager2.sessions.set(sessionId, serverSession); + + const rollbackStatus = await crashManager1.initiateRollback( + clientSession, + true, + ); + expect(initiateRollbackSpy1).toHaveBeenCalled(); + expect(rollbackStatus).toBe(true); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/cron-job.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/cron-job.test.ts new file mode 100644 index 0000000000..e80052692c --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/cron-job.test.ts @@ -0,0 +1,269 @@ +import "jest-extended"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { + Secp256k1Keys, + JsObjectSigner, + IJsObjectSignerOptions, +} from "@hyperledger/cactus-common"; +import { ICrashRecoveryManagerOptions } from "../../../../main/typescript/gol/crash-manager"; +import { + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { + 
bufArray2HexStr, + getSatpLogKey, +} from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { + GatewayOrchestrator, + IGatewayOrchestratorOptions, +} from "../../../../main/typescript/gol/gateway-orchestrator"; +import { + ISATPBridgesOptions, + SATPBridgesManager, +} from "../../../../main/typescript/gol/satp-bridges-manager"; +import { create } from "@bufbuild/protobuf"; +import { KnexLocalLogRepository } from "../../../../main/typescript/repository/knex-local-log-repository"; +import { KnexRemoteLogRepository } from "../../../../main/typescript/repository/knex-remote-log-repository"; +import { + ILocalLogRepository, + IRemoteLogRepository, +} from "../../../../main/typescript/repository/interfaces/repository"; +import { + SATP_ARCHITECTURE_VERSION, + SATP_CORE_VERSION, + SATP_CRASH_VERSION, +} from "../../../../main/typescript/core/constants"; +import { LocalLog } from "../../../../main/typescript/core/types"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; +import knex, { Knex } from "knex"; + +let crashManager: CrashManager; +let localRepository: ILocalLogRepository; +let remoteRepository: IRemoteLogRepository; +let knexInstanceClient: Knex; +let knexInstanceRemote: Knex; +const sessionId = uuidv4(); +const createMockSession = ( + maxTimeout: string, + maxRetries: string, +): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: true, + client: true, + }); + + const sessionData = mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.senderAsset = create(AssetSchema, { + tokenId: "MOCK_TOKEN_ID", + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + sessionData.receiverAsset = create(AssetSchema, { + tokenType: TokenType.ERC20, + amount: 
BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + + return mockSession; +}; + +beforeAll(async () => { + localRepository = new KnexLocalLogRepository(knexClientConnection); + remoteRepository = new KnexRemoteLogRepository(knexSourceRemoteConnection); + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + + knexInstanceRemote = knex(knexSourceRemoteConnection); + await knexInstanceRemote.migrate.latest(); + + const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); + const signerOptions: IJsObjectSignerOptions = { + privateKey: bufArray2HexStr(keyPairs.privateKey), + logLevel: "debug", + }; + const signer = new JsObjectSigner(signerOptions); + + const gatewayIdentity: GatewayIdentity = { + id: "mockID-1", + name: "CustomGateway", + version: [ + { + Core: SATP_CORE_VERSION, + Architecture: SATP_ARCHITECTURE_VERSION, + Crash: SATP_CRASH_VERSION, + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + }; + + const orchestratorOptions: IGatewayOrchestratorOptions = { + logLevel: "DEBUG", + localGateway: gatewayIdentity, + counterPartyGateways: [], + signer: signer, + }; + const gatewayOrchestrator = new GatewayOrchestrator(orchestratorOptions); + + const bridgesManagerOptions: ISATPBridgesOptions = { + logLevel: "DEBUG", + supportedDLTs: gatewayIdentity.supportedDLTs, + networks: [], + }; + const bridgesManager = new SATPBridgesManager(bridgesManagerOptions); + + const crashOptions: ICrashRecoveryManagerOptions = { + instanceId: "test-instance", + logLevel: "DEBUG", + bridgeConfig: bridgesManager, + orchestrator: gatewayOrchestrator, + localRepository: localRepository, + remoteRepository: remoteRepository, + signer: signer, + pubKey: bufArray2HexStr(keyPairs.publicKey), + }; + + crashManager = new CrashManager(crashOptions); +}); + +afterAll(async () => { + if (crashManager) { + crashManager.stopScheduler(); + crashManager.localRepository.destroy(); + crashManager.remoteRepository.destroy(); + } + if (knexInstanceClient || knexInstanceRemote) { + await knexInstanceClient.destroy(); + await knexInstanceRemote.destroy(); + } +}); + +describe("CrashManager Tests", () => { + it("Default config test", async () => { + const mock = jest + .spyOn(crashManager, "checkAndResolveCrashes") + .mockResolvedValue(); + const session = createMockSession("40000", "3"); + const serverSessionData = session.getServerSessionData(); + + serverSessionData.id = sessionId; + + const key = getSatpLogKey(sessionId, "type2", "done"); + + const log: LocalLog = { + sessionId: sessionId, + type: "type2", + key: key, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockServerLog = crashManager["localRepository"]; + await mockServerLog.create(log); + + await crashManager.recoverSessions(); + + // 15 seconds (3 cron intervals of 5 seconds each) + await new Promise((resolve) => setTimeout(resolve, 15000)); + + expect(mock).toHaveBeenCalledTimes(3); + + mock.mockRestore(); + }); + + it("Custom config test", async () => { + const customCrashManager = new CrashManager({ + ...crashManager.options, + healthCheckInterval: "*/2 * * * * *", + }); + const mock = jest + .spyOn(customCrashManager, "checkAndResolveCrashes") 
+ .mockResolvedValue(); + + const session = createMockSession("15000", "3"); + const serverSessionData = session.getServerSessionData(); + + serverSessionData.id = sessionId; + + const key = getSatpLogKey(sessionId, "type3", "done"); + + const log: LocalLog = { + sessionId: sessionId, + type: "type3", + key: key, + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(serverSessionData), + sequenceNumber: Number(serverSessionData.lastSequenceNumber), + }; + + const mockServerLog = customCrashManager["localRepository"]; + await mockServerLog.create(log); + + await customCrashManager.recoverSessions(); + + // 6 seconds (3 cron intervals of 2 seconds each) + await new Promise((resolve) => setTimeout(resolve, 6000)); + + expect(mock).toHaveBeenCalledTimes(3); + customCrashManager.stopScheduler(); + mock.mockRestore(); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/rollback-factory.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/rollback-factory.test.ts new file mode 100644 index 0000000000..5d85e544d1 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/rollback-factory.test.ts @@ -0,0 +1,239 @@ +import "jest-extended"; +import { RollbackStrategyFactory } from "../../../../main/typescript/core/crash-management/rollback/rollback-strategy-factory"; +import { Stage0RollbackStrategy } from "../../../../main/typescript/core/crash-management/rollback/stage0-rollback-strategy"; +import { Stage1RollbackStrategy } from "../../../../main/typescript/core/crash-management/rollback/stage1-rollback-strategy"; +import { Stage2RollbackStrategy } from "../../../../main/typescript/core/crash-management/rollback/stage2-rollback-strategy"; +import { Stage3RollbackStrategy } from "../../../../main/typescript/core/crash-management/rollback/stage3-rollback-strategy"; +import { + ILocalLogRepository, + IRemoteLogRepository, +} from "../../../../main/typescript/repository/interfaces/repository"; +import { SATPBridgesManager } from "../../../../main/typescript/gol/satp-bridges-manager"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { create } from "@bufbuild/protobuf"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { + MessageStagesHashes, + MessageStagesHashesSchema, + Stage0HashesSchema, + Stage1HashesSchema, + Stage2HashesSchema, + Stage3HashesSchema, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { SupportedChain } from "../../../../main/typescript/core/types"; +import { + JsObjectSigner, + LogLevelDesc, + LoggerProvider, + Secp256k1Keys, +} from "@hyperledger/cactus-common"; +import { SATPLogger } from "../../../../main/typescript/logging"; +import { KnexLocalLogRepository as LocalLogRepository } from "../../../../main/typescript/repository/knex-local-log-repository"; +import { KnexRemoteLogRepository as RemoteLogRepository } from "../../../../main/typescript/repository/knex-remote-log-repository"; + +const createMockSession = (hashes?: MessageStagesHashes): SATPSession => { + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: false, + client: true, + }); + + const sessionData = mockSession.getClientSessionData(); + sessionData.id = "mock-session-id"; + sessionData.hashes = hashes; + + return mockSession; +}; +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); +const signer 
= new JsObjectSigner({ + privateKey: keyPairs.privateKey, +}); +const logLevel: LogLevelDesc = "DEBUG"; +const log = LoggerProvider.getOrCreate({ + level: logLevel, + label: "RollbackStrategyFactory", +}); +let localRepository: ILocalLogRepository; +let remoteRepository: IRemoteLogRepository; +let dbLogger: SATPLogger; + +describe("RollbackStrategyFactory Tests", () => { + let factory: RollbackStrategyFactory; + let bridgesManager: SATPBridgesManager; + + beforeAll(async () => { + localRepository = new LocalLogRepository(knexClientConnection); + remoteRepository = new RemoteLogRepository(knexSourceRemoteConnection); + dbLogger = new SATPLogger({ + localRepository, + remoteRepository, + signer, + pubKey: Buffer.from(keyPairs.publicKey).toString("hex"), + }); + + bridgesManager = new SATPBridgesManager({ + logLevel: "DEBUG", + networks: [], + supportedDLTs: [SupportedChain.BESU, SupportedChain.FABRIC], + }); + + factory = new RollbackStrategyFactory(bridgesManager, log, dbLogger); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it("should return Stage0RollbackStrategy if no hashes are present", () => { + const hashes = create(MessageStagesHashesSchema, { + stage0: undefined, + stage1: undefined, + stage2: undefined, + stage3: undefined, + }); + const mockSession = createMockSession(hashes); + + const strategy = factory.createStrategy(mockSession); + expect(strategy).toBeInstanceOf(Stage0RollbackStrategy); + }); + + it("should return Stage0RollbackStrategy if Stage0 is partially complete", () => { + const partialStage0 = create(Stage0HashesSchema, { + newSessionRequestMessageHash: "hash1", + // missing other Stage0 hashes + }); + const hashes = create(MessageStagesHashesSchema, { + stage0: partialStage0, + }); + const mockSession = createMockSession(hashes); + + const strategy = factory.createStrategy(mockSession); + expect(strategy).toBeInstanceOf(Stage0RollbackStrategy); + }); + + it("should return Stage1RollbackStrategy if Stage0 is complete but Stage1 is partially complete", () => { + const completeStage0 = create(Stage0HashesSchema, { + newSessionRequestMessageHash: "hash1", + newSessionResponseMessageHash: "hash2", + preSatpTransferRequestMessageHash: "hash3", + preSatpTransferResponseMessageHash: "hash4", + }); + const partialStage1 = create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "hash1", + // missing other Stage1 hashes + }); + const hashes = create(MessageStagesHashesSchema, { + stage0: completeStage0, + stage1: partialStage1, + }); + const mockSession = createMockSession(hashes); + + const strategy = factory.createStrategy(mockSession); + expect(strategy).toBeInstanceOf(Stage1RollbackStrategy); + }); + + it("should return Stage2RollbackStrategy if Stage0 and Stage1 are complete but Stage2 is partially complete", () => { + const completeStage0 = create(Stage0HashesSchema, { + newSessionRequestMessageHash: "hash1", + newSessionResponseMessageHash: "hash2", + preSatpTransferRequestMessageHash: "hash3", + preSatpTransferResponseMessageHash: "hash4", + }); + const completeStage1 = create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "hash1", + transferProposalReceiptMessageHash: "hash2", + transferProposalRejectMessageHash: "hash3", + transferCommenceRequestMessageHash: "hash4", + transferCommenceResponseMessageHash: "hash5", + }); + const partialStage2 = create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "hash1", + // missing lockAssertionReceiptMessageHash + }); + const hashes = create(MessageStagesHashesSchema, { + stage0: 
completeStage0, + stage1: completeStage1, + stage2: partialStage2, + }); + const mockSession = createMockSession(hashes); + + const strategy = factory.createStrategy(mockSession); + expect(strategy).toBeInstanceOf(Stage2RollbackStrategy); + }); + + it("should return Stage3RollbackStrategy if Stage0, Stage1, and Stage2 are complete but Stage3 is partially complete", () => { + const completeStage0 = create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }); + const completeStage1 = create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h1", + transferProposalReceiptMessageHash: "h2", + transferProposalRejectMessageHash: "h3", + transferCommenceRequestMessageHash: "h4", + transferCommenceResponseMessageHash: "h5", + }); + const completeStage2 = create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h1", + lockAssertionReceiptMessageHash: "h2", + }); + const partialStage3 = create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h1", + // missing other Stage3 hashes + }); + const hashes = create(MessageStagesHashesSchema, { + stage0: completeStage0, + stage1: completeStage1, + stage2: completeStage2, + stage3: partialStage3, + }); + const mockSession = createMockSession(hashes); + + const strategy = factory.createStrategy(mockSession); + expect(strategy).toBeInstanceOf(Stage3RollbackStrategy); + }); + + it("should not rollback if all stages are complete", () => { + const completeStage0 = create(Stage0HashesSchema, { + newSessionRequestMessageHash: "h1", + newSessionResponseMessageHash: "h2", + preSatpTransferRequestMessageHash: "h3", + preSatpTransferResponseMessageHash: "h4", + }); + const completeStage1 = create(Stage1HashesSchema, { + transferProposalRequestMessageHash: "h1", + transferProposalReceiptMessageHash: "h2", + transferProposalRejectMessageHash: "h3", + transferCommenceRequestMessageHash: "h4", + transferCommenceResponseMessageHash: "h5", + }); + const completeStage2 = create(Stage2HashesSchema, { + lockAssertionRequestMessageHash: "h1", + lockAssertionReceiptMessageHash: "h2", + }); + const completeStage3 = create(Stage3HashesSchema, { + commitPreparationRequestMessageHash: "h1", + commitReadyResponseMessageHash: "h2", + commitFinalAssertionRequestMessageHash: "h3", + commitFinalAcknowledgementReceiptResponseMessageHash: "h4", + transferCompleteMessageHash: "h5", + transferCompleteResponseMessageHash: "h6", + }); + const hashes = create(MessageStagesHashesSchema, { + stage0: completeStage0, + stage1: completeStage1, + stage2: completeStage2, + stage3: completeStage3, + }); + const mockSession = createMockSession(hashes); + + expect(() => factory.createStrategy(mockSession)).toThrowError( + "No rollback needed as all stages are complete.", + ); + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/scenarios.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/scenarios.test.ts new file mode 100644 index 0000000000..16d98a8665 --- /dev/null +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/crash-management/scenarios.test.ts @@ -0,0 +1,490 @@ +import "jest-extended"; +import { + LogLevelDesc, + Secp256k1Keys, + JsObjectSigner, + IJsObjectSignerOptions, +} from "@hyperledger/cactus-common"; +import { CrashManager } from "../../../../main/typescript/gol/crash-manager"; +import { CrashStatus } from 
"../../../../main/typescript/core/types"; +import { ICrashRecoveryManagerOptions } from "../../../../main/typescript/gol/crash-manager"; +import { Knex, knex } from "knex"; +import { + LocalLog, + SupportedChain, + GatewayIdentity, + Address, +} from "../../../../main/typescript/core/types"; +import { + AssetSchema, + CredentialProfile, + LockType, + SignatureAlgorithm, +} from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/message_pb"; +import { v4 as uuidv4 } from "uuid"; +import { SessionData } from "../../../../main/typescript/generated/proto/cacti/satp/v02/common/session_pb"; +import { SATP_VERSION } from "../../../../main/typescript/core/constants"; +import { SATPSession } from "../../../../main/typescript/core/satp-session"; +import { + knexClientConnection, + knexSourceRemoteConnection, +} from "../../knex.config"; +import { + bufArray2HexStr, + getSatpLogKey, +} from "../../../../main/typescript/gateway-utils"; +import { TokenType } from "../../../../main/typescript/core/stage-services/satp-bridge/types/asset"; +import { RecoverUpdateMessage } from "../../../../main/typescript/generated/proto/cacti/satp/v02/crash_recovery_pb"; +import { + GatewayOrchestrator, + IGatewayOrchestratorOptions, +} from "../../../../main/typescript/gol/gateway-orchestrator"; +import { + ISATPBridgesOptions, + SATPBridgesManager, +} from "../../../../main/typescript/gol/satp-bridges-manager"; +import { create } from "@bufbuild/protobuf"; + +import { + SATP_ARCHITECTURE_VERSION, + SATP_CORE_VERSION, + SATP_CRASH_VERSION, +} from "../../../../main/typescript/core/constants"; +import { KnexLocalLogRepository } from "../../../../main/typescript/repository/knex-local-log-repository"; +import { KnexRemoteLogRepository } from "../../../../main/typescript/repository/knex-remote-log-repository"; +import { + ILocalLogRepository, + IRemoteLogRepository, +} from "../../../../main/typescript/repository/interfaces/repository"; +import { stringify as safeStableStringify } from "safe-stable-stringify"; + +let mockSession: SATPSession; +const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); + +const createMockSession = (maxTimeout: string, maxRetries: string) => { + const sessionId = uuidv4(); + const mockSession = new SATPSession({ + contextID: "MOCK_CONTEXT_ID", + server: false, + client: true, + }); + + const sessionData = mockSession.hasClientSessionData() + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + sessionData.id = sessionId; + sessionData.maxTimeout = maxTimeout; + sessionData.maxRetries = maxRetries; + sessionData.version = SATP_VERSION; + sessionData.clientGatewayPubkey = Buffer.from(keyPairs.publicKey).toString( + "hex", + ); + sessionData.serverGatewayPubkey = sessionData.clientGatewayPubkey; + sessionData.originatorPubkey = "MOCK_ORIGINATOR_PUBKEY"; + sessionData.beneficiaryPubkey = "MOCK_BENEFICIARY_PUBKEY"; + sessionData.digitalAssetId = "MOCK_DIGITAL_ASSET_ID"; + sessionData.assetProfileId = "MOCK_ASSET_PROFILE_ID"; + sessionData.receiverGatewayOwnerId = "MOCK_RECEIVER_GATEWAY_OWNER_ID"; + sessionData.recipientGatewayNetworkId = SupportedChain.FABRIC; + sessionData.senderGatewayOwnerId = "MOCK_SENDER_GATEWAY_OWNER_ID"; + sessionData.senderGatewayNetworkId = SupportedChain.BESU; + sessionData.signatureAlgorithm = SignatureAlgorithm.RSA; + sessionData.lockType = LockType.FAUCET; + sessionData.lockExpirationTime = BigInt(1000); + sessionData.credentialProfile = CredentialProfile.X509; + sessionData.loggingProfile = "MOCK_LOGGING_PROFILE"; + sessionData.accessControlProfile = "MOCK_ACCESS_CONTROL_PROFILE"; + sessionData.resourceUrl = "MOCK_RESOURCE_URL"; + sessionData.lockAssertionExpiration = BigInt(99999); + sessionData.receiverContractOntology = "MOCK_RECEIVER_CONTRACT_ONTOLOGY"; + sessionData.senderContractOntology = "MOCK_SENDER_CONTRACT_ONTOLOGY"; + sessionData.sourceLedgerAssetId = "MOCK_SOURCE_LEDGER_ASSET_ID"; + sessionData.senderAsset = create(AssetSchema, { + tokenId: "MOCK_TOKEN_ID", + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_SENDER_ASSET_OWNER", + ontology: "MOCK_SENDER_ASSET_ONTOLOGY", + contractName: "MOCK_SENDER_ASSET_CONTRACT_NAME", + contractAddress: "MOCK_SENDER_ASSET_CONTRACT_ADDRESS", + }); + sessionData.receiverAsset = create(AssetSchema, { + tokenType: TokenType.ERC20, + amount: BigInt(100), + owner: "MOCK_RECEIVER_ASSET_OWNER", + ontology: "MOCK_RECEIVER_ASSET_ONTOLOGY", + contractName: "MOCK_RECEIVER_ASSET_CONTRACT_NAME", + mspId: "MOCK_RECEIVER_ASSET_MSP_ID", + channelName: "MOCK_CHANNEL_ID", + }); + return mockSession; +}; +let crashManager: CrashManager; +let knexInstanceClient: Knex; +let knexInstanceRemote: Knex; +let localRepository: ILocalLogRepository; +let remoteRepository: IRemoteLogRepository; + +beforeAll(async () => { + knexInstanceClient = knex(knexClientConnection); + await knexInstanceClient.migrate.latest(); + knexInstanceRemote = knex(knexSourceRemoteConnection); + await knexInstanceRemote.migrate.latest(); + + localRepository = new KnexLocalLogRepository(knexClientConnection); + remoteRepository = new KnexRemoteLogRepository(knexClientConnection); + + const keyPairs = Secp256k1Keys.generateKeyPairsBuffer(); + const signerOptions: IJsObjectSignerOptions = { + privateKey: bufArray2HexStr(keyPairs.privateKey), + logLevel: "debug", + }; + const signer = new JsObjectSigner(signerOptions); + + const gatewayIdentity = { + id: "mockID-1", + name: "CustomGateway", + version: [ + { + Core: SATP_CORE_VERSION, + Architecture: SATP_ARCHITECTURE_VERSION, + Crash: SATP_CRASH_VERSION, + }, + ], + supportedDLTs: [SupportedChain.BESU], + proofID: "mockProofID10", + address: "http://localhost" as Address, + } as GatewayIdentity; + + const orchestratorOptions: IGatewayOrchestratorOptions = { + logLevel: "DEBUG", + localGateway: gatewayIdentity, + counterPartyGateways: [], + signer: signer, + }; + const gatewayOrchestrator = new 
GatewayOrchestrator(orchestratorOptions); + + const bridgesManagerOptions: ISATPBridgesOptions = { + logLevel: "DEBUG", + supportedDLTs: gatewayIdentity.supportedDLTs, + networks: [], + }; + const bridgesManager = new SATPBridgesManager(bridgesManagerOptions); + + const crashOptions: ICrashRecoveryManagerOptions = { + instanceId: "test-instance", + logLevel: "DEBUG" as LogLevelDesc, + bridgeConfig: bridgesManager, + orchestrator: gatewayOrchestrator, + localRepository: localRepository, + remoteRepository: remoteRepository, + signer: signer, + pubKey: bufArray2HexStr(keyPairs.publicKey), + }; + crashManager = new CrashManager(crashOptions); +}); + +afterEach(async () => { + crashManager["sessions"].clear(); + jest.clearAllMocks(); +}); + +afterAll(async () => { + if (crashManager) { + crashManager.stopScheduler(); + crashManager.localRepository.destroy(); + crashManager.remoteRepository.destroy(); + } + if (knexInstanceClient || knexInstanceRemote) { + await knexInstanceClient.destroy(); + await knexInstanceRemote.destroy(); + } +}); + +describe("CrashManager Tests", () => { + it("should reconstruct session by fetching logs", async () => { + mockSession = createMockSession("1000", "3"); + + const testData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + const sessionId = testData.id; + + // load sample log in database + const key = getSatpLogKey(sessionId, "type", "operation"); + const mockLogEntry: LocalLog = { + sessionId: sessionId, + type: "type", + key: key, + operation: "operation", + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + const mockLogRepository = crashManager["localRepository"]; + + await mockLogRepository.create(mockLogEntry); + await crashManager.recoverSessions(); + + expect(crashManager["sessions"].has(sessionId)).toBeTrue(); + + const recoveredSession = crashManager["sessions"].get(sessionId); + + expect(recoveredSession).toBeDefined(); + + if (recoveredSession) { + const parsedSessionData: SessionData = JSON.parse(mockLogEntry.data); + const sessionData = recoveredSession.hasClientSessionData() + ? recoveredSession.getClientSessionData() + : recoveredSession.getServerSessionData(); + + expect(sessionData).toEqual(parsedSessionData); + } + }); + + it("should invoke rollback based on session timeout", async () => { + mockSession = createMockSession("1000", "3"); // timeout of 1 sec + // client-side test + const testData = mockSession.hasClientSessionData() + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + const sessionId = testData.id; + + const handleRollbackSpy = jest + .spyOn(crashManager, "initiateRollback") + .mockImplementation(async () => true); + + const key = getSatpLogKey(sessionId, "type_o", "done"); + + const pastTime = new Date(Date.now() - 10000).toISOString(); + + const mockLogEntry: LocalLog = { + sessionId: sessionId, + type: "type_o", + key: key, + operation: "done", + timestamp: pastTime, + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + + const mockLogRepository = crashManager["localRepository"]; + + await mockLogRepository.create(mockLogEntry); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRollbackSpy).toHaveBeenCalled(); + + handleRollbackSpy.mockRestore(); + }); + + it("should not recover if no crash is detected", async () => { + mockSession = createMockSession("10000", "3"); + + const testData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + + const mockLogEntry: LocalLog = { + sessionId: testData.id, + type: "type", + key: getSatpLogKey(testData.id, "type", "done"), + operation: "done", + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + + await crashManager.localRepository.create(mockLogEntry); + + const handleRecoverySpy = jest.spyOn(crashManager, "handleRecovery"); + const initiateRollbackSpy = jest.spyOn(crashManager, "initiateRollback"); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRecoverySpy).not.toHaveBeenCalled(); + expect(initiateRollbackSpy).not.toHaveBeenCalled(); + }); + + it("should invoke handleRecovery when crash is initially detected", async () => { + mockSession = createMockSession("1000", "3"); + + const handleRecoverySpy = jest + .spyOn(crashManager, "handleRecovery") + .mockImplementation(async () => true); + + jest + .spyOn(crashManager as any, "checkCrash") + .mockImplementation(() => Promise.resolve(CrashStatus.IN_RECOVERY)); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRecoverySpy).toHaveBeenCalled(); + + handleRecoverySpy.mockRestore(); + }); + + it("should invoke initiateRollback when recovery attempts are exhausted", async () => { + mockSession = createMockSession("1000", "3"); + + const handleRecoverySpy = jest + .spyOn(crashManager, "handleRecovery") + .mockImplementation(async () => false); + + const initiateRollbackSpy = jest + .spyOn(crashManager, "initiateRollback") + .mockImplementation(async () => true); + + jest + .spyOn(crashManager as any, "checkCrash") + .mockImplementation(() => Promise.resolve(CrashStatus.IN_RECOVERY)); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRecoverySpy).toHaveBeenCalled(); + expect(initiateRollbackSpy).toHaveBeenCalled(); + + handleRecoverySpy.mockRestore(); + initiateRollbackSpy.mockRestore(); + }); + + it("should detect crash based on incomplete operation in logs", async () => { + mockSession = createMockSession("10000", "3"); + + const testData = mockSession.hasClientSessionData() + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + const sessionId = testData.id; + + const handleRecoverySpy = jest + .spyOn(crashManager, "handleRecovery") + .mockImplementation(async () => true); + + const key = getSatpLogKey(sessionId, "type", "init"); + + const mockLogEntry: LocalLog = { + sessionId: sessionId, + type: "type", + key: key, + operation: "init", // operation!=done + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + + const mockLogRepository = crashManager["localRepository"]; + + await mockLogRepository.create(mockLogEntry); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRecoverySpy).toHaveBeenCalled(); + + handleRecoverySpy.mockRestore(); + }); + + it("should detect crash based on incomplete operation in logs and initiate rollback when recovery fails", async () => { + mockSession = createMockSession("10000", "3"); + + const testData = mockSession.hasClientSessionData() + ? mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + const sessionId = testData.id; + + const handleRecoverySpy = jest + .spyOn(crashManager, "handleRecovery") + .mockImplementation(async () => false); + + const handleInitiateRollBackSpy = jest + .spyOn(crashManager, "initiateRollback") + .mockImplementation(async () => true); + + const key = getSatpLogKey(sessionId, "type3", "init"); + + const mockLogEntry: LocalLog = { + sessionId: sessionId, + type: "type3", + key: key, + operation: "init", // operation!=done + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + + const mockLogRepository = crashManager["localRepository"]; + + await mockLogRepository.create(mockLogEntry); + + await crashManager.checkAndResolveCrash(mockSession); + + expect(handleRecoverySpy).toHaveBeenCalled(); + expect(handleInitiateRollBackSpy).toHaveBeenCalled(); + + handleRecoverySpy.mockRestore(); + handleInitiateRollBackSpy.mockRestore(); + }); + + it("should process logs from counterparty gateway", async () => { + mockSession = createMockSession("10000", "3"); + + const testData = mockSession.hasClientSessionData() + ? 
mockSession.getClientSessionData() + : mockSession.getServerSessionData(); + const sessionId = testData.id; + + // Create an existing log entry for client + const existingLogEntry: LocalLog = { + sessionId: sessionId, + type: "type_client", + key: getSatpLogKey(sessionId, "type_client", "operation_client"), + operation: "operation_client", + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber), + }; + + await crashManager.localRepository.create(existingLogEntry); + + // Create the log entry for server + const updatedLogEntry: LocalLog = { + sessionId: sessionId, + type: "type_server", + key: getSatpLogKey(sessionId, "type_server", "operation_server"), + operation: "operation_server", + timestamp: new Date().toISOString(), + data: safeStableStringify(testData), + sequenceNumber: Number(testData.lastSequenceNumber) + 1, + }; + + // RecoverUpdateMessage to simulate receiving the log from server + const recoverUpdateMessage = { + sessionId: sessionId, + messageType: "urn:ietf:SATP-2pc:msgtype:recover-update-msg", + hashRecoverMessage: "", + recoveredLogs: [updatedLogEntry], + senderSignature: "", + } as RecoverUpdateMessage; + + const result = + await crashManager["processRecoverUpdateMessage"](recoverUpdateMessage); + + expect(result).toBeTrue(); + + const reconstructedSession = crashManager["sessions"].get(sessionId); + expect(reconstructedSession).toBeDefined(); + + if (reconstructedSession) { + const reconstructedSessionData = + reconstructedSession.hasClientSessionData() + ? reconstructedSession.getClientSessionData() + : reconstructedSession.getServerSessionData(); + + expect(reconstructedSessionData).toBeDefined(); + expect(BigInt(reconstructedSessionData.lastSequenceNumber)).toEqual( + testData.lastSequenceNumber, + ); + } + }); +}); diff --git a/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/services.test.ts b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/services.test.ts index d3fee5a3b3..b2b5b774df 100644 --- a/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/services.test.ts +++ b/packages/cactus-plugin-satp-hermes/src/test/typescript/unit/services.test.ts @@ -79,10 +79,10 @@ import { Knex, knex } from "knex"; import { KnexLocalLogRepository as LocalLogRepository } from "../../../main/typescript/repository/knex-local-log-repository"; import { KnexRemoteLogRepository as RemoteLogRepository } from "../../../main/typescript/repository/knex-remote-log-repository"; import { SATPLogger } from "../../../main/typescript/logging"; +import { create, isMessage } from "@bufbuild/protobuf"; let knexInstanceClient: Knex; // test as a client let knexInstanceRemote: Knex; -import { create, isMessage } from "@bufbuild/protobuf"; const logLevel: LogLevelDesc = "DEBUG"; diff --git a/packages/cactus-test-tooling/src/main/typescript/satp-runner/satp-gateway-runner.ts b/packages/cactus-test-tooling/src/main/typescript/satp-runner/satp-gateway-runner.ts index 0598f850fc..f384b4adf2 100644 --- a/packages/cactus-test-tooling/src/main/typescript/satp-runner/satp-gateway-runner.ts +++ b/packages/cactus-test-tooling/src/main/typescript/satp-runner/satp-gateway-runner.ts @@ -22,6 +22,8 @@ export interface ISATPGatewayRunnerConstructorOptions { outputLogFile?: string; errorLogFile?: string; knexDir?: string; + enableMigration?: boolean; + enableCrashManager?: boolean; } export const SATP_GATEWAY_RUNNER_DEFAULT_OPTIONS = Object.freeze({ @@ -30,6 +32,8 @@ export const 
   serverPort: 3010,
   clientPort: 3011,
   apiPort: 4010,
+  enableMigration: true,
+  enableCrashManager: false,
 });
 
 export const SATP_GATEWAY_RUNNER_OPTIONS_JOI_SCHEMA: Joi.Schema =
@@ -49,6 +53,8 @@ export const SATP_GATEWAY_RUNNER_OPTIONS_JOI_SCHEMA: Joi.Schema =
     .max(65535)
     .required(),
   apiPort: Joi.number().integer().positive().min(1024).max(65535).required(),
+  enableMigration: Joi.boolean().optional(),
+  enableCrashManager: Joi.boolean().optional(),
 });
 
 export class SATPGatewayRunner implements ITestLedger {
@@ -62,6 +68,8 @@ export class SATPGatewayRunner implements ITestLedger {
   public readonly outputLogFile?: string;
   public readonly errorLogFile?: string;
   public readonly knexDir?: string;
+  public readonly enableMigration: boolean;
+  public readonly enableCrashManager: boolean;
 
   private readonly log: Logger;
   private container: Container | undefined;
@@ -85,6 +93,12 @@ export class SATPGatewayRunner implements ITestLedger {
       options.clientPort || SATP_GATEWAY_RUNNER_DEFAULT_OPTIONS.clientPort;
     this.apiPort =
       options.apiPort || SATP_GATEWAY_RUNNER_DEFAULT_OPTIONS.apiPort;
+    this.enableMigration =
+      options.enableMigration ??
+      SATP_GATEWAY_RUNNER_DEFAULT_OPTIONS.enableMigration;
+    this.enableCrashManager =
+      options.enableCrashManager ??
+      SATP_GATEWAY_RUNNER_DEFAULT_OPTIONS.enableCrashManager;
     this.configFile = options.configFile;
     this.outputLogFile = options.outputLogFile;
     this.errorLogFile = options.errorLogFile;
@@ -309,6 +323,8 @@ export class SATPGatewayRunner implements ITestLedger {
       serverPort: this.serverPort,
       clientPort: this.clientPort,
       apiPort: this.apiPort,
+      enableMigration: this.enableMigration,
+      enableCrashManager: this.enableCrashManager,
     });
 
     if (validationResult.error) {
diff --git a/yarn.lock b/yarn.lock
index 7b95ce1dd8..6688ec8254 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9506,6 +9506,7 @@ __metadata:
     "@types/fs-extra": "npm:11.0.4"
     "@types/google-protobuf": "npm:3.15.12"
     "@types/node": "npm:18.18.2"
+    "@types/node-schedule": "npm:2.1.7"
     "@types/pg": "npm:8.11.10"
     "@types/swagger-ui-express": "npm:4.1.6"
     "@types/tape": "npm:4.13.4"
@@ -9534,6 +9535,7 @@ __metadata:
     knex: "npm:2.4.0"
     kubo-rpc-client: "npm:3.0.1"
     make-dir-cli: "npm:3.1.0"
+    node-schedule: "npm:2.1.1"
     npm-run-all: "npm:4.1.5"
     openzeppelin-solidity: "npm:3.4.2"
     pg: "npm:8.13.1"
@@ -16411,6 +16413,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@types/node-schedule@npm:2.1.7":
+  version: 2.1.7
+  resolution: "@types/node-schedule@npm:2.1.7"
+  dependencies:
+    "@types/node": "npm:*"
+  checksum: 10/cbcae09587563438896c01308702e46c212e44d11939b487d7019b74f4779668bf478d5e97880cb37e48514b02808db51b3469886a00ab3d0cac00e8a1b255df
+  languageName: node
+  linkType: hard
+
 "@types/node-vault@npm:0.9.13":
   version: 0.9.13
   resolution: "@types/node-vault@npm:0.9.13"
@@ -23150,6 +23161,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"cron-parser@npm:^4.2.0":
+  version: 4.9.0
+  resolution: "cron-parser@npm:4.9.0"
+  dependencies:
+    luxon: "npm:^3.2.1"
+  checksum: 10/ffca5e532a5ee0923412ee6e4c7f9bbceacc6ddf8810c16d3e9fb4fe5ec7e2de1b6896d7956f304bb6bc96b0ce37ad7e3935304179d52951c18d84107184faa7
+  languageName: node
+  linkType: hard
+
 "cross-env@npm:7.0.3":
   version: 7.0.3
   resolution: "cross-env@npm:7.0.3"
@@ -36767,6 +36787,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"long-timeout@npm:0.1.1":
+  version: 0.1.1
+  resolution: "long-timeout@npm:0.1.1"
+  checksum: 10/48668e5362cb74c4b77a6b833d59f149b9bb9e99c5a5097609807e2597cd0920613b2a42b89bd0870848298be3691064d95599a04ae010023d07dba39932afa7
+  languageName: node
+  linkType: hard
+
 "long@npm:5.2.3, long@npm:^5.0.0, long@npm:^5.2.3":
   version: 5.2.3
   resolution: "long@npm:5.2.3"
@@ -36924,7 +36951,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"luxon@npm:^3.3.0":
+"luxon@npm:^3.2.1, luxon@npm:^3.3.0":
   version: 3.5.0
   resolution: "luxon@npm:3.5.0"
   checksum: 10/48f86e6c1c96815139f8559456a3354a276ba79bcef0ae0d4f2172f7652f3ba2be2237b0e103b8ea0b79b47715354ac9fac04eb1db3485dcc72d5110491dd47f
@@ -39246,6 +39273,17 @@ __metadata:
   languageName: node
   linkType: hard
 
+"node-schedule@npm:2.1.1":
+  version: 2.1.1
+  resolution: "node-schedule@npm:2.1.1"
+  dependencies:
+    cron-parser: "npm:^4.2.0"
+    long-timeout: "npm:0.1.1"
+    sorted-array-functions: "npm:^1.3.0"
+  checksum: 10/0b0449f8a1f784cd599a8d79b1fa404ed9e3e4e2b1a48f027c97fd0632cd86e48ad762d366d6b6f9d48a940cad5b7afbdb1b833649ee870407591a6cf1297749
+  languageName: node
+  linkType: hard
+
 "node-source-walk@npm:^6.0.0, node-source-walk@npm:^6.0.1, node-source-walk@npm:^6.0.2":
   version: 6.0.2
   resolution: "node-source-walk@npm:6.0.2"
@@ -45784,13 +45822,20 @@ __metadata:
   languageName: node
   linkType: hard
 
-"safe-stable-stringify@npm:2.5.0, safe-stable-stringify@npm:^2.3.1, safe-stable-stringify@npm:^2.4.3":
+"safe-stable-stringify@npm:2.5.0, safe-stable-stringify@npm:^2.4.3":
   version: 2.5.0
   resolution: "safe-stable-stringify@npm:2.5.0"
   checksum: 10/2697fa186c17c38c3ca5309637b4ac6de2f1c3d282da27cd5e1e3c88eca0fb1f9aea568a6aabdf284111592c8782b94ee07176f17126031be72ab1313ed46c5c
   languageName: node
   linkType: hard
 
+"safe-stable-stringify@npm:^2.3.1":
+  version: 2.3.1
+  resolution: "safe-stable-stringify@npm:2.3.1"
+  checksum: 10/8a6ed4e5fb80694970f1939538518c44a59c71c74305e12b5964cbe3850636212eddac881da1f676b0232015213676e07750fe75bc402afbfe29851c8b52381e
+  languageName: node
+  linkType: hard
+
 "safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0, safer-buffer@npm:^2.0.2, safer-buffer@npm:^2.1.0, safer-buffer@npm:~2.1.0":
   version: 2.1.2
   resolution: "safer-buffer@npm:2.1.2"
@@ -47071,6 +47116,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"sorted-array-functions@npm:^1.3.0":
+  version: 1.3.0
+  resolution: "sorted-array-functions@npm:1.3.0"
+  checksum: 10/673fd39ca3b6c92644d4483eac1700bb7d7555713a536822a7522a35af559bef3e72f10d89356b75042dc394cd7c2e2ab6f40024385218ec3c85bb7335032857
+  languageName: node
+  linkType: hard
+
 "source-list-map@npm:^2.0.0, source-list-map@npm:^2.0.1":
   version: 2.0.1
   resolution: "source-list-map@npm:2.0.1"