diff --git a/.env.example b/.env.example index 31e75396..2c7ed8e8 100644 --- a/.env.example +++ b/.env.example @@ -1,77 +1,89 @@ -##### ORCHESTRATOR ##### - -MADARA_ORCHESTRATOR_HOST= -MADARA_ORCHESTRATOR_PORT= -MADARA_ORCHESTRATOR_MAX_BLOCK_NO_TO_PROCESS= - -##### AWS CONFIG ##### - -AWS_ACCESS_KEY_ID= -AWS_SECRET_ACCESS_KEY= -AWS_REGION= - -##### Omniqueue ##### - -AWS_DEFAULT_REGION="localhost" - -##### STORAGE ##### - -DATA_STORAGE= -MADARA_ORCHESTRATOR_AWS_S3_BUCKET_NAME= - -##### QUEUE ##### - -QUEUE_PROVIDER= -SQS_JOB_PROCESSING_QUEUE_URL= -SQS_JOB_VERIFICATION_QUEUE_URL= -SQS_JOB_HANDLE_FAILURE_QUEUE_URL= -SQS_WORKER_TRIGGER_QUEUE_URL= - -##### SNS ##### - -ALERTS="sns" -MADARA_ORCHESTRATOR_AWS_SNS_ARN="arn:aws:sns:us-east-1:000000000000:madara-orchestrator-arn" - -##### DATABASE ##### - -DATABASE= -MADARA_ORCHESTRATOR_MONGODB_CONNECTION_URL= -MADARA_ORCHESTRATOR_DATABASE_NAME= - -##### PROVER ##### - -PROVER_SERVICE= - -## if using sharp -MADARA_ORCHESTRATOR_SHARP_CUSTOMER_ID= -MADARA_ORCHESTRATOR_SHARP_URL= -MADARA_ORCHESTRATOR_SHARP_USER_CRT= -MADARA_ORCHESTRATOR_SHARP_USER_KEY= -MADARA_ORCHESTRATOR_SHARP_SERVER_CRT= -MADARA_ORCHESTRATOR_SHARP_PROOF_LAYOUT= - - -## if using atlantic -MADARA_ORCHESTRATOR_ATLANTIC_API_KEY= -MADARA_ORCHESTRATOR_ATLANTIC_URL= -MADARA_ORCHESTRATOR_MOCK_FACT_HASH= -MADARA_ORCHESTRATOR_PROVER_TYPE= - -##### ON CHAIN CONFIG ##### - -DA_LAYER= -SETTLEMENT_LAYER= -MADARA_ORCHESTRATOR_ETHEREUM_SETTLEMENT_RPC_URL= -MADARA_ORCHESTRATOR_MADARA_RPC_URL= -MADARA_ORCHESTRATOR_GPS_VERIFIER_CONTRACT_ADDRESS= -PRIVATE_KEY= -MADARA_ORCHESTRATOR_ETHEREUM_PRIVATE_KEY= -MADARA_ORCHESTRATOR_L1_CORE_CONTRACT_ADDRESS= - -##### STARKNET SETTLEMENT (L3s) ##### -MADARA_ORCHESTRATOR_STARKNET_PRIVATE_KEY= -MADARA_ORCHESTRATOR_STARKNET_ACCOUNT_ADDRESS= - -##### Instrumentation ##### -MADARA_ORCHESTRATOR_OTEL_SERVICE_NAME= -MADARA_ORCHESTRATOR_OTEL_COLLECTOR_ENDPOINT= +#### AWS CONFIG #### +AWS_ACCESS_KEY_ID= # Your AWS access key ID 
+AWS_SECRET_ACCESS_KEY= # Your AWS secret access key +AWS_REGION= # AWS region +# For AWS SDK +AWS_ENDPOINT_URL= # AWS endpoint URL +# For Omniqueue +AWS_DEFAULT_REGION= # AWS default region + +# For EventBridge +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TYPE= # EventBridge type (rule/schedule) +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_RULE_NAME= # EventBridge rule name +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_ROLE_NAME= # EventBridge role name +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_POLICY_NAME= # EventBridge policy name + +#### ALERTS #### +MADARA_ORCHESTRATOR_AWS_SNS_ARN= # SNS ARN for alerts + +#### DATA AVAILABILITY #### +## ETHEREUM ## +MADARA_ORCHESTRATOR_ETHEREUM_DA_RPC_URL= # Ethereum RPC URL for data availability + +#### DATABASE #### +## MONGODB ## +MADARA_ORCHESTRATOR_MONGODB_CONNECTION_URL= # MongoDB connection URL +MADARA_ORCHESTRATOR_DATABASE_NAME= # MongoDB database name + +#### PROVER #### +## SHARP ## +MADARA_ORCHESTRATOR_SHARP_CUSTOMER_ID= # SHARP customer ID +MADARA_ORCHESTRATOR_SHARP_URL= # SHARP service URL +MADARA_ORCHESTRATOR_SHARP_USER_CRT= # SHARP user certificate +MADARA_ORCHESTRATOR_SHARP_USER_KEY= # SHARP user private key +MADARA_ORCHESTRATOR_SHARP_SERVER_CRT= # SHARP server certificate +MADARA_ORCHESTRATOR_SHARP_RPC_NODE_URL= # SHARP RPC node URL +MADARA_ORCHESTRATOR_SHARP_PROOF_LAYOUT= # SHARP proof layout +MADARA_ORCHESTRATOR_GPS_VERIFIER_CONTRACT_ADDRESS= # GPS verifier contract address + +## ATLANTIC ## +MADARA_ORCHESTRATOR_ATLANTIC_API_KEY= # Atlantic API key +MADARA_ORCHESTRATOR_ATLANTIC_SERVICE_URL= # Atlantic service URL +MADARA_ORCHESTRATOR_ATLANTIC_MOCK_FACT_HASH= # Whether to use mock fact registry (true/false) +MADARA_ORCHESTRATOR_ATLANTIC_PROVER_TYPE= # Prover type (herodotus/starkware) +MADARA_ORCHESTRATOR_ATLANTIC_SETTLEMENT_LAYER= # Settlement layer (ethereum/starknet) +MADARA_ORCHESTRATOR_ATLANTIC_VERIFIER_CONTRACT_ADDRESS= # Atlantic verifier contract address +MADARA_ORCHESTRATOR_ATLANTIC_RPC_NODE_URL= # Atlantic RPC node 
URL + +#### QUEUE #### +## AWS SQS ## +MADARA_ORCHESTRATOR_SQS_PREFIX= # SQS queue prefix +MADARA_ORCHESTRATOR_SQS_SUFFIX= # SQS queue suffix +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TARGET_QUEUE_NAME= # EventBridge target queue name +MADARA_ORCHESTRATOR_SQS_BASE_QUEUE_URL= # SQS base queue URL + +#### SETTLEMENT #### +## ETHEREUM ## +MADARA_ORCHESTRATOR_ETHEREUM_SETTLEMENT_RPC_URL= # Ethereum settlement RPC URL +MADARA_ORCHESTRATOR_ETHEREUM_PRIVATE_KEY= # Ethereum private key +MADARA_ORCHESTRATOR_L1_CORE_CONTRACT_ADDRESS= # L1 core contract address +MADARA_ORCHESTRATOR_STARKNET_OPERATOR_ADDRESS= # Starknet operator address + +## STARKNET ## +MADARA_ORCHESTRATOR_STARKNET_SETTLEMENT_RPC_URL= # Starknet settlement RPC URL +MADARA_ORCHESTRATOR_STARKNET_PRIVATE_KEY= # Starknet private key +MADARA_ORCHESTRATOR_STARKNET_ACCOUNT_ADDRESS= # Starknet account address +MADARA_ORCHESTRATOR_STARKNET_CAIRO_CORE_CONTRACT_ADDRESS= # Starknet Cairo core contract address +MADARA_ORCHESTRATOR_STARKNET_FINALITY_RETRY_WAIT_IN_SECS= # Retry wait time in seconds +# MADARA_ORCHESTRATOR_MADARA_BINARY_PATH= # Optional: Madara binary path + +#### STORAGE #### +## AWS S3 ## +MADARA_ORCHESTRATOR_AWS_S3_BUCKET_NAME= # S3 bucket name + +#### INSTRUMENTATION #### +## OTEL ## +MADARA_ORCHESTRATOR_OTEL_SERVICE_NAME= # OpenTelemetry service name +MADARA_ORCHESTRATOR_OTEL_COLLECTOR_ENDPOINT= # OpenTelemetry collector endpoint + +#### SERVER #### +MADARA_ORCHESTRATOR_HOST= # Server host +MADARA_ORCHESTRATOR_PORT= # Server port + +#### SERVICE #### +MADARA_ORCHESTRATOR_MAX_BLOCK_NO_TO_PROCESS= # Maximum block number to process (optional) +MADARA_ORCHESTRATOR_MIN_BLOCK_NO_TO_PROCESS= # Minimum block number to process (optional) +MADARA_ORCHESTRATOR_MADARA_RPC_URL= # Madara RPC URL + +#### SNOS #### +MADARA_ORCHESTRATOR_RPC_FOR_SNOS= # SNOS RPC URL \ No newline at end of file diff --git a/.env.test b/.env.test index 73d1d6da..95804a57 100644 --- a/.env.test +++ b/.env.test @@ -10,7 +10,7 @@ 
AWS_ENDPOINT_URL=http://localhost.localstack.cloud:4566 AWS_DEFAULT_REGION=localhost # For EventBridge - +MADARA_ORCHESTRATOR_EVENT_BRIDGE_TYPE=rule MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_RULE_NAME=madara-orchestrator-worker-trigger MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_ROLE_NAME=madara-orchestrator-worker-trigger-role MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_POLICY_NAME=madara-orchestrator-worker-trigger-policy diff --git a/.markdownlint.json b/.markdownlint.json index 68e7410c..500ea857 100644 --- a/.markdownlint.json +++ b/.markdownlint.json @@ -4,6 +4,7 @@ "MD045": false, "MD003": false, "MD013": { - "code_blocks": false + "code_blocks": false, + "line_length": 120 } } diff --git a/.prettierignore b/.prettierignore index 1243185b..2158a86d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,3 +7,4 @@ pathfinder/ orchestrator_venv/ build/ node_modules/ +scripts/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 29e14f69..6e8b0acf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## Added +- readme: setup instructions added +- Added : Grafana dashboard - tests: http_client tests added - Added Atlantic proving service integration - setup functions added for cloud and db @@ -48,6 +50,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## Changed +- refactor: Readme and .env.example - refactor: http_mock version updated - refactor: prover-services renamed to prover-clients - refactor: update json made generic to update any json file @@ -80,6 +83,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
## Fixed +- refactor: instrumentation +- `is_worker_enabled` status check moved from `VerificationFailed` to `Failed` - refactor: static attributes for telemetry - refactor: aws setup for Event Bridge - refactor: RUST_LOG filtering support diff --git a/README.md b/README.md index 8ed59c5f..ddc2e429 100644 --- a/README.md +++ b/README.md @@ -1,27 +1,520 @@ -# Madara Orchestrator +# Madara Orchestrator šŸŽ­ The Madara orchestrator is designed to be an additional service which runs in -parallel to Madara and handles +parallel to Madara and handles various critical jobs that ensure proper block +processing, proof generation, data submission and state transitions. -1. publishing data to the respective DA layer -2. running SNOS and submitting jobs to the prover -3. updating the state on Cairo core contracts +> šŸ“ **Note**: These instructions are verified for Ubuntu systems with AMD64 architecture. While most steps remain similar +> for macOS, some package names and installation commands may differ. -The tentative flow of the orchestrator looks like this but this is subject to -change as we learn more about external systems and the constraints involved. 
+## Table of Contents -![orchestrator_da_sequencer_diagram](./docs/orchestrator_da_sequencer_diagram.png) +- [Overview](#-overview) +- [Architecture](#ļø-architecture) + - [Job Processing Model](#job-processing-model) + - [Queue Structure](#queue-structure) + - [Workflow](#workflow) +- [Technical Requirements](#ļø-technical-requirements) + - [System Dependencies](#system-dependencies) + - [Core Dependencies](#core-dependencies) +- [Installation & Setup](#-installation--setup) + - [Building from Source](#building-from-source) + - [Local Development Setup](#local-development-setup) + - [Setup Mode](#setup-mode) + - [Run Mode](#run-mode) + - [Command Line Options](#command-line-options) +- [Configuration](#ļø-configuration) + - [AWS Configuration](#aws-configuration) + - [Prover Configuration](#prover-configuration) + - [Database Configuration](#database-configuration) +- [Testing](#-testing) + - [Local Environment Setup](#local-environment-setup) + - [Types of Tests](#types-of-tests) + - [Running Tests](#running-tests) +- [Monitoring](#-monitoring) +- [Error Handling](#-error-handling) +- [Additional Resources](#additional-resources) -## Testing +## šŸ“‹ Overview -- Files needed for tests can be fetched through s3 : +The Madara Orchestrator coordinates and triggers five primary jobs in sequence, +managing their execution through a centralized queue system, allowing +for multiple orchestrators to run together! - ```shell - wget -P ./crates/prover-services/sharp-service/tests/artifacts https://madara-orchestrator-sharp-pie.s3.amazonaws.com/238996-SN.zip - ``` +1. **SNOS (Starknet OS) Job** šŸ”„ -- To run all the tests : + - Identifies blocks that need processing. + - Triggers SNOS run on identified blocks. + - Tracks SNOS execution status and PIE (Program Independent Execution) generation - ```shell - cargo llvm-cov nextest --release --lcov --output-path lcov.info --test-threads=1 - ``` +2. 
**Proving Job** āœ… + + - Coordinates proof generation by submitting PIE to proving services + - Tracks proof generation progress and completion + +3. **Data Submission Job** šŸ“¤ + + - Manages state update data preparation for availability layers + - If needed, coordinates blob submission to data availability layers + - Currently integrates with Ethereum (EIP-4844 blob transactions) + - Additional DA layer integrations in development (e.g., Celestia) + +4. **State Transition Job** šŸ”„ + - Coordinates state transitions with settlement layers + - Manages proof and state update submissions + - Handles integration with Ethereum and Starknet settlement layers + +Each job is managed through a queue-based system where the orchestrator: + +- Determines when and which blocks need processing +- Triggers the appropriate services +- Monitors job progress +- Manages job dependencies and sequencing +- Handles retries and failure cases + +## šŸ›ļø Architecture + +### Job Processing Model + +The orchestrator implements a queue-based architecture where each job type +follows a three-phase execution model: + +1. **Creation**: Jobs are spawned based on block availability +2. **Processing**: Core job logic execution +3. **Verification**: Result validation and confirmation + +### Queue Structure + +The system uses dedicated queues for managing different job phases: + +- Worker Trigger Queue +- SNOS Processing/Verification Queues +- Proving Processing/Verification Queues +- Data Submission Processing/Verification Queues +- State Update Processing/Verification Queues +- Job Failure Handling Queue + +### Workflow + +1. Cron jobs trigger worker tasks via the worker-trigger queue +2. Workers determine block-level job requirements +3. Jobs are created and added to processing queues +4. Processed jobs move to verification queues +5. 
Verified jobs are marked as complete in the database + +## šŸ› ļø Technical Requirements + +### System Dependencies + +> For macOS users, use `brew install` instead of `apt install` for these dependencies. + +- Build essentials (`build-essential`) +- OpenSSL (`libssl-dev`) +- Package config (`pkg-config`) +- Python 3.9 with development files +- GMP library (`libgmp-dev`) + +### Core Dependencies + +- [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) +- [Rust](https://www.rust-lang.org/tools/install) +- [Madara Node](https://github.com/madara-alliance/madara) + - Required for block processing + - Follow setup instructions at [Madara Documentation](https://github.com/madara-alliance/madara) +- Prover Service (ATLANTIC) +- MongoDB for job management +- AWS services (or Localstack for local development): + - SQS for queues + - S3 for data storage + - SNS for alerts + - EventBridge for scheduling + +> šŸšØ **Important Note**: SNOS requires the `get_storage_proof` RPC endpoint to function. +> Currently, this endpoint is not implemented in Madara. +> +> šŸš§ Until madara implements the `get_storage_proof` endpoint, you need to run Pathfinder alongside Madara: +> +> - Madara will run in sequencer mode +> - Pathfinder will sync with Madara +> - The orchestrator will use Pathfinder's RPC URL for SNOS and state update fetching +> +> This setup is temporary until either: +> +> 1. SNOS is adapted to work without the `get_storage_proof` endpoint, or +> 2. The `get_storage_proof` endpoint is implemented in Madara + +## šŸš€ Installation & Setup + +### Building from Source + +1. 
**Install System Dependencies** + + ```bash + # Ubuntu/Debian + sudo apt-get update + sudo apt install build-essential openssl pkg-config libssl-dev + sudo apt install python3.9 python3.9-venv python3.9-distutils libgmp-dev python3.9-dev + + # For macOS + brew install openssl pkg-config gmp python@3.9 + ``` + + > šŸšØ **Note**: python 3.9 is required for the `SNOS` to create `os_latest.json` hence the `python3.9` in the above command. + +2. **Install Rust** (Cross-platform) + + ```bash + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + source ~/.bashrc # Or source ~/.zshrc for macOS + ``` + +3. **Clone Repository** + + ```bash + git clone https://github.com/madara-alliance/madara-orchestrator.git + cd madara-orchestrator + git submodule update --init + ``` + +4. **Build SNOS** + + ```bash + make snos + ``` + + > šŸšØ **Note**: python 3.9 is required for the `SNOS` to create `os_latest.json` + +5. **Build Project** + + ```bash + cargo build --release + ``` + +### Local Development Setup + +1. **Install Docker** (Cross-platform) + Follow the official installation guides: + + - [Ubuntu Installation Guide](https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository) + - [macOS Installation Guide](https://docs.docker.com/desktop/install/mac-install/) + +2. **Install Foundry** (Cross-platform) + + ```bash + curl -L https://foundry.paradigm.xyz | bash + foundryup + ``` + +3. **Start Local Services** + + ```bash + # Start MongoDB + docker run -d -p 27017:27017 mongo + + # Start Localstack + docker run -d -p 4566:4566 localstack/localstack@sha256:763947722c6c8d33d5fbf7e8d52b4bddec5be35274a0998fdc6176d733375314 + + # Start Anvil in a separate terminal + anvil --block-time 1 + ``` + +4. 
**Setup Mock Proving Service** + + šŸš§ This setup is for development purposes only: + + ```bash + # Start the mock prover service using Docker + docker run -d -p 6000:6000 ocdbytes/mock-prover:latest + + # Set the mock prover URL in your .env + MADARA_ORCHESTRATOR_SHARP_URL=http://localhost:6000 + ``` + +5. **Run Pathfinder** (Choose one method) + + > šŸšØ **Important Note**: + > + > - Pathfinder requires a WebSocket Ethereum endpoint (`ethereum.url`). Since Anvil doesn't support WebSocket yet, + > you'll need to provide a different Ethereum endpoint (e.g., Alchemy, Infura). This is okay for local development + > as Pathfinder only uses this to get the state update from core contract. + > - Make sure `chain-id` matches your Madara chain ID (default: `MADARA_DEVNET`) + > - The `gateway-url` and `feeder-gateway-url` should point to your local Madara node (default: `http://localhost:8080`) + + a. **From Source** (Recommended for development) + + ```bash + # Clone the repository + git clone https://github.com/eqlabs/pathfinder.git + cd pathfinder + + # Run pathfinder + cargo run --bin pathfinder -- \ + --network custom \ + --chain-id MADARA_DEVNET \ + --ethereum.url wss://eth-sepolia.g.alchemy.com/v2/xxx \ # Replace with your Ethereum endpoint + --gateway-url http://localhost:8080/gateway \ + --feeder-gateway-url http://localhost:8080/feeder_gateway \ + --storage.state-tries archive \ + --data-directory ~/Desktop/pathfinder_db/ \ + --http-rpc 127.0.0.1:9545 + ``` + + b. 
**Using Docker** + + ```bash + # Create data directory + mkdir -p ~/pathfinder_data + + # Run pathfinder container + docker run \ + --name pathfinder \ + --restart unless-stopped \ + -p 9545:9545 \ + --user "$(id -u):$(id -g)" \ + -e RUST_LOG=info \ + -v ~/pathfinder_data:/usr/share/pathfinder/data \ + eqlabs/pathfinder \ + --network custom \ + --chain-id MADARA_DEVNET \ + --ethereum.url wss://eth-sepolia.g.alchemy.com/v2/xxx \ # Replace with your Ethereum endpoint + --gateway-url http://localhost:8080/gateway \ + --feeder-gateway-url http://localhost:8080/feeder_gateway \ + --storage.state-tries archive + ``` + +6. **Deploy Mock Verifier Contract** + + šŸš§ For development purposes, you can deploy the mock verifier contract using: + + ```bash + ./scripts/dummy_contract_deployment.sh http://localhost:9944 0 + ``` + + This script: + + - Takes the Madara endpoint and block number as parameters + - Automatically deploys both the verifier contract and core contract + - Sets up the necessary contract relationships + - The deployed contract addresses will be output to the console + + ```bash + MADARA_ORCHESTRATOR_L1_CORE_CONTRACT_ADDRESS= + MADARA_ORCHESTRATOR_VERIFIER_ADDRESS= + ``` + +šŸš§ Note: The mock services are intended for development and testing purposes only. +In production, you'll need to use actual proving services and verifier contracts. + +### Setup Mode + +Setup mode configures the required AWS services and dependencies. +Use the following command: + +```bash +cargo run --release --bin orchestrator setup --aws --aws-s3 --aws-sqs --aws-sns --aws-event-bridge --event-bridge-type rule +``` + +> šŸšØ **Note**: +> +> - Setup mode is currently in development. A fresh setup is required +> if the process fails mid-way. +> - The `event-bridge-type` needs to be `rule` in case of localstack. +> - The `event-bridge-type` should be `schedule` in case of AWS. + +### Run Mode + +Run mode executes the orchestrator's job processing workflow. 
Example command: + +```bash +RUST_LOG=info cargo run --release --bin orchestrator run \ + --sharp \ + --aws \ + --settle-on-ethereum \ + --aws-s3 \ + --aws-sqs \ + --aws-sns \ + --da-on-ethereum \ + --mongodb +``` + +### Command Line Options + +1. **Prover Services** (choose one): + + - `--atlantic`: Use Atlantic prover + - `--sharp`: Use SHARP prover + +2. **Settlement Layer** (choose one): + + - `--settle-on-ethereum`: Use Ethereum + - `--settle-on-starknet`: Use Starknet + +3. **Data Availability**: + + - `--da-on-ethereum`: Use Ethereum + +4. **Infrastructure**: + + - `--aws`: Use AWS services (or Localstack) + +5. **Data Storage**: + + - `--aws-s3`: Store state updates and program outputs + +6. **Database**: + + - `--mongodb`: Store job information + +7. **Queue System**: + + - `--aws-sqs`: Message queue service + +8. **Alerting**: + + - `--aws-sns`: Notification service + +9. **Event Bridge Scheduling**: + + - `--aws-event-bridge`: Enable AWS Event Bridge + - `--event-bridge-type`: Specify the type of Event Bridge (rule or schedule) + +10. **Monitoring**: + - `--otel-service-name`: OpenTelemetry service name + - `--otel-collector-endpoint`: OpenTelemetry collector endpoint + +## āš™ļø Configuration + +The orchestrator uses environment variables for configuration. +Create a `.env` file with the following sections: + +### AWS Configuration + +```env +AWS_ACCESS_KEY_ID= +AWS_SECRET_ACCESS_KEY= +AWS_REGION= +``` + +Note: These configurations are also picked up from your AWS credentials file (~/.aws/credentials) +or environment variables if not specified in the .env file. 
+ +### Prover Configuration + +```env +# SHARP Configuration +MADARA_ORCHESTRATOR_SHARP_CUSTOMER_ID= +MADARA_ORCHESTRATOR_SHARP_URL= +# or +# ATLANTIC Configuration +MADARA_ORCHESTRATOR_ATLANTIC_API_KEY= +MADARA_ORCHESTRATOR_ATLANTIC_SERVICE_URL= +``` + +### Database Configuration + +```env +MADARA_ORCHESTRATOR_MONGODB_CONNECTION_URL=mongodb://localhost:27017 +MADARA_ORCHESTRATOR_DATABASE_NAME=orchestrator +``` + +For a complete list of configuration options, refer to the `.env.example` file +in the repository. + +## šŸ” Monitoring + +The orchestrator includes a telemetry system that tracks: + +- Job execution metrics +- Processing time statistics +- RPC performance metrics + +OpenTelemetry integration is available for detailed monitoring. +It requires a `Otel-collector` url to be able to send metrics/logs/traces. + +## šŸ› Error Handling + +- Failed jobs are moved to a dedicated failure handling queue +- Automatic retry mechanism with configurable intervals +- Failed jobs are tracked in the database for manual inspection after maximum retries +- Integrated telemetry system for monitoring job failures + +## šŸ““ Testing + +### Local Environment Setup + +šŸš§ This setup is for development purposes. For production deployment, please refer to our deployment documentation. + +Before running tests, ensure you have: + +1. **Required Services Running**: + + - MongoDB on port 27017 + - Localstack on port 4566 + - Anvil (local Ethereum node) + +2. **Environment Configuration**: + + ```bash + export MADARA_ORCHESTRATOR_ETHEREUM_SETTLEMENT_RPC_URL= + export MADARA_ORCHESTRATOR_RPC_FOR_SNOS= + export AWS_REGION=us-east-1 + ``` + +### Types of Tests + +1. **E2E Tests** šŸ”„ + + šŸš§ Development test environment: + + - End-to-end workflow testing + - Tests orchestrator functionality on block 66645 of Starknet + - Uses mocked proving endpoints + +2. 
**Integration & Unit Tests** šŸ”Œ + - Tests component interactions + - Verifies individual functionalities + +### Running Tests + +#### Running E2E Tests + +```bash +RUST_LOG=info cargo test --features testing test_orchestrator_workflow -- --nocapture +``` + +#### Running Integration and Unit Tests + +```bash +RUST_LOG=debug RUST_BACKTRACE=1 cargo llvm-cov nextest \ + --release \ + --features testing \ + --lcov \ + --output-path lcov.info \ + --test-threads=1 \ + --workspace \ + --exclude=e2e-tests \ + --no-fail-fast +``` + +This command: + +- Generates detailed coverage reports in LCOV format +- Excludes E2E tests from coverage analysis +- Runs tests sequentially (single thread) +- Continues testing even if failures occur +- Enables debug logging and full backtraces for better error + diagnosis + +The coverage report (`lcov.info`) can be used with various code coverage +visualization tools. + +## šŸ““ More Information + +- Read the architecture present at `./docs/orchestrator_da_sequencer_diagram.png` + +## Additional Resources + +- Architecture Diagram: See `./docs/orchestrator_da_sequencer_diagram.png` +- [Madara Documentation](https://github.com/madara-alliance/madara) +- [LocalStack Documentation](https://docs.localstack.cloud/) +- [Foundry Documentation](https://book.getfoundry.sh/) diff --git a/crates/orchestrator/src/cli/cron/event_bridge.rs b/crates/orchestrator/src/cli/cron/event_bridge.rs index e9e83c25..0602d36e 100644 --- a/crates/orchestrator/src/cli/cron/event_bridge.rs +++ b/crates/orchestrator/src/cli/cron/event_bridge.rs @@ -1,16 +1,23 @@ use clap::Args; +use crate::cron::event_bridge::EventBridgeType; + /// CLI arguments for the aws event bridge. 
#[derive(Debug, Clone, Args)] -#[group()] +#[group(requires_all = ["aws_event_bridge"])] pub struct AWSEventBridgeCliArgs { /// Use the AWS Event Bridge client #[arg(long)] pub aws_event_bridge: bool, + /// The type of Event Bridge to use (rule or schedule) + #[arg(env = "MADARA_ORCHESTRATOR_EVENT_BRIDGE_TYPE", long, value_enum)] + pub event_bridge_type: Option, + /// The name of the queue for the event bridge #[arg(env = "MADARA_ORCHESTRATOR_EVENT_BRIDGE_TARGET_QUEUE_NAME", long, default_value = Some("madara_orchestrator_worker_trigger_queue"), help = "The name of the SNS queue to send messages to from the event bridge.")] pub target_queue_name: Option, + /// The cron time for the event bridge trigger rule. #[arg(env = "MADARA_ORCHESTRATOR_EVENT_BRIDGE_CRON_TIME", long, default_value = Some("60"), help = "The cron time for the event bridge trigger rule. Defaults to 10 seconds.")] pub cron_time: Option, diff --git a/crates/orchestrator/src/cli/mod.rs b/crates/orchestrator/src/cli/mod.rs index d68d5b85..a78984c6 100644 --- a/crates/orchestrator/src/cli/mod.rs +++ b/crates/orchestrator/src/cli/mod.rs @@ -409,7 +409,10 @@ pub mod validate_params { aws_config_args: &AWSConfigCliArgs, ) -> Result { if aws_event_bridge_args.aws_event_bridge && aws_config_args.aws { + let cron_type = aws_event_bridge_args.event_bridge_type.clone().expect("Event Bridge type is required"); + Ok(CronValidatedArgs::AWSEventBridge(AWSEventBridgeValidatedArgs { + cron_type, target_queue_name: aws_event_bridge_args .target_queue_name .clone() @@ -426,12 +429,10 @@ pub mod validate_params { .trigger_rule_name .clone() .expect("Trigger rule name is required"), - trigger_role_name: aws_event_bridge_args .trigger_role_name .clone() .expect("Trigger role name is required"), - trigger_policy_name: aws_event_bridge_args .trigger_policy_name .clone() @@ -660,6 +661,7 @@ pub mod validate_params { validate_server_params, validate_service_params, validate_settlement_params, validate_snos_params, 
validate_storage_params, }; + use crate::cron::event_bridge::EventBridgeType; #[rstest] #[case(true)] @@ -863,6 +865,7 @@ pub mod validate_params { fn test_validate_cron_params(#[case] is_aws: bool) { let aws_event_bridge_args: AWSEventBridgeCliArgs = AWSEventBridgeCliArgs { aws_event_bridge: is_aws, + event_bridge_type: Some(EventBridgeType::Rule), target_queue_name: Some(String::from("test")), cron_time: Some(String::from("12")), trigger_rule_name: Some(String::from("test")), diff --git a/crates/orchestrator/src/cron/event_bridge.rs b/crates/orchestrator/src/cron/event_bridge.rs index 112665f0..bd62f121 100644 --- a/crates/orchestrator/src/cron/event_bridge.rs +++ b/crates/orchestrator/src/cron/event_bridge.rs @@ -1,10 +1,9 @@ use std::time::Duration; -use async_std::task::sleep; use async_trait::async_trait; use aws_config::SdkConfig; +use aws_sdk_eventbridge::types::{InputTransformer, RuleState, Target as EventBridgeTarget}; use aws_sdk_scheduler::types::{FlexibleTimeWindow, FlexibleTimeWindowMode, Target}; -use aws_sdk_scheduler::Client as SchedulerClient; use aws_sdk_sqs::types::QueueAttributeName; use aws_sdk_sqs::Client as SqsClient; use color_eyre::eyre::Ok; @@ -13,8 +12,21 @@ use super::{get_worker_trigger_message, TriggerArns}; use crate::cron::Cron; use crate::queue::job_queue::WorkerTriggerType; +#[derive(Clone, Debug, clap::ValueEnum)] +pub enum EventBridgeType { + Rule, + Schedule, +} + +#[derive(Clone, Debug)] +enum EventBridgeClient { + Rule(aws_sdk_eventbridge::Client), + Schedule(aws_sdk_scheduler::Client), +} + #[derive(Clone, Debug)] pub struct AWSEventBridgeValidatedArgs { + pub cron_type: EventBridgeType, pub target_queue_name: String, pub cron_time: Duration, pub trigger_rule_name: String, @@ -26,7 +38,7 @@ pub struct AWSEventBridge { target_queue_name: String, cron_time: Duration, trigger_rule_name: String, - client: SchedulerClient, + client: EventBridgeClient, queue_client: SqsClient, iam_client: aws_sdk_iam::Client, trigger_role_name: 
String, @@ -35,11 +47,16 @@ impl AWSEventBridge { pub fn new_with_args(params: &AWSEventBridgeValidatedArgs, aws_config: &SdkConfig) -> Self { + let client = match params.cron_type { + EventBridgeType::Rule => EventBridgeClient::Rule(aws_sdk_eventbridge::Client::new(aws_config)), + EventBridgeType::Schedule => EventBridgeClient::Schedule(aws_sdk_scheduler::Client::new(aws_config)), + }; + Self { target_queue_name: params.target_queue_name.clone(), cron_time: params.cron_time, trigger_rule_name: params.trigger_rule_name.clone(), - client: aws_sdk_scheduler::Client::new(aws_config), + client, queue_client: aws_sdk_sqs::Client::new(aws_config), iam_client: aws_sdk_iam::Client::new(aws_config), trigger_role_name: params.trigger_role_name.clone(), @@ -66,6 +83,7 @@ impl Cron for AWSEventBridge { // Create IAM role for EventBridge let role_name = format!("{}-{}", self.trigger_role_name, uuid::Uuid::new_v4()); + // TODO: might need to change this accordingly to support rule, skipping for now let assume_role_policy = r#"{ "Version": "2012-10-17", "Statement": [{ @@ -113,7 +131,7 @@ impl Cron for AWSEventBridge { // Attach the policy to the role self.iam_client.attach_role_policy().role_name(&role_name).policy_arn(&policy_arn).send().await?; - sleep(Duration::from_secs(60)).await; + // NOTE: the fixed 60s wait for IAM role propagation was removed to speed up setup; if role attachment races appear, prefer a bounded retry over a blind sleep. Ok(TriggerArns { queue_arn: queue_arn.to_string(), role_arn: role_arn.to_string() }) } @@ -123,30 +141,61 @@ impl Cron for AWSEventBridge { trigger_type: &WorkerTriggerType, trigger_arns: &TriggerArns, ) -> color_eyre::Result<()> { - let trigger_name = format!("{}-{}", self.trigger_rule_name, trigger_type); - - // Set flexible time window (you can adjust this as needed) - let flexible_time_window = FlexibleTimeWindow::builder().mode(FlexibleTimeWindowMode::Off).build()?; - let message = get_worker_trigger_message(trigger_type.clone())?; - - // Create target for SQS queue - let target = Target::builder() -
.arn(trigger_arns.queue_arn.clone()) - .role_arn(trigger_arns.role_arn.clone()) - .input(message) - .build()?; - - // Create the schedule - self.client - .create_schedule() - .name(trigger_name) - .schedule_expression_timezone("UTC") - .flexible_time_window(flexible_time_window) - .schedule_expression(duration_to_rate_string(self.cron_time)) - .target(target) - .send() - .await?; + let trigger_name = format!("{}-{}", self.trigger_rule_name, trigger_type); + tracing::debug!("Setting up EventBridge trigger {}", trigger_name); + + match self.client.clone() { + EventBridgeClient::Rule(client) => { + let input_transformer = + InputTransformer::builder().input_paths_map("time", "$.time").input_template(message).build()?; + + client + .put_rule() + .name(trigger_name.clone()) + .schedule_expression(duration_to_rate_string(self.cron_time)) + .state(RuleState::Enabled) + .send() + .await?; + + client + .put_targets() + .rule(trigger_name.clone()) + .targets( + EventBridgeTarget::builder() + .id(uuid::Uuid::new_v4().to_string()) + .arn(trigger_arns.queue_arn.clone()) + .input_transformer(input_transformer.clone()) + .build()?, + ) + .send() + .await?; + } + EventBridgeClient::Schedule(client) => { + // Set flexible time window (you can adjust this as needed) + let flexible_time_window = FlexibleTimeWindow::builder().mode(FlexibleTimeWindowMode::Off).build()?; + + let message = get_worker_trigger_message(trigger_type.clone())?; + + // Create target for SQS queue + let target = Target::builder() + .arn(trigger_arns.queue_arn.clone()) + .role_arn(trigger_arns.role_arn.clone()) + .input(message) + .build()?; + + // Create the schedule + client + .create_schedule() + .name(trigger_name) + .schedule_expression_timezone("UTC") + .flexible_time_window(flexible_time_window) + .schedule_expression(duration_to_rate_string(self.cron_time)) + .target(target) + .send() + .await?; + } + }; Ok(()) } diff --git a/crates/orchestrator/src/jobs/constants.rs b/crates/orchestrator/src/jobs/constants.rs index
596017a2..07992401 100644 --- a/crates/orchestrator/src/jobs/constants.rs +++ b/crates/orchestrator/src/jobs/constants.rs @@ -7,3 +7,5 @@ pub const JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO: &str = "last_failed_bl pub const JOB_METADATA_SNOS_BLOCK: &str = "block_number_to_run"; pub const JOB_METADATA_SNOS_FACT: &str = "snos_fact"; pub const JOB_METADATA_FAILURE_REASON: &str = "failure_reason"; +pub const JOB_METADATA_ERROR: &str = "error"; +pub const JOB_METADATA_PROCESSING_COMPLETED_AT: &str = "processing_completed_at"; diff --git a/crates/orchestrator/src/jobs/mod.rs b/crates/orchestrator/src/jobs/mod.rs index 2263090a..181d64f4 100644 --- a/crates/orchestrator/src/jobs/mod.rs +++ b/crates/orchestrator/src/jobs/mod.rs @@ -5,8 +5,9 @@ use std::sync::Arc; use std::time::{Duration, Instant}; use async_trait::async_trait; +use chrono::Utc; use color_eyre::eyre::{eyre, Context}; -use constants::JOB_METADATA_FAILURE_REASON; +use constants::{JOB_METADATA_ERROR, JOB_METADATA_FAILURE_REASON, JOB_METADATA_PROCESSING_COMPLETED_AT}; use conversion::parse_string; use da_job::DaError; use futures::FutureExt; @@ -186,7 +187,7 @@ pub async fn create_job( tracing::info!(log_type = "completed", category = "general", function_type = "create_job", block_no = %internal_id, "General create job completed for block"); let duration = start.elapsed(); ORCHESTRATOR_METRICS.block_gauge.record(parse_string(&internal_id)?, &attributes); - ORCHESTRATOR_METRICS.successful_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.successful_job_operations.add(1.0, &attributes); ORCHESTRATOR_METRICS.jobs_response_time.record(duration.as_secs_f64(), &attributes); Ok(()) } @@ -201,7 +202,7 @@ pub async fn process_job(id: Uuid, config: Arc) -> Result<(), JobError> tracing::info!(log_type = "starting", category = "general", function_type = "process_job", block_no = %internal_id, "General process job started for block"); tracing::Span::current().record("job", format!("{:?}", job.clone())); - 
tracing::Span::current().record("job_type", format!("{:?}", job.job_type.clone())); + tracing::Span::current().record("job_type", format!("{:?}", job.job_type)); tracing::Span::current().record("internal_id", job.internal_id.clone()); tracing::debug!(job_id = ?id, status = ?job.status, "Current job status"); @@ -234,6 +235,9 @@ pub async fn process_job(id: Uuid, config: Arc) -> Result<(), JobError> let external_id = match AssertUnwindSafe(job_handler.process_job(config.clone(), &mut job)).catch_unwind().await { Ok(Ok(external_id)) => { tracing::debug!(job_id = ?id, "Successfully processed job"); + // Add the time of processing to the metadata. + job.metadata + .insert(JOB_METADATA_PROCESSING_COMPLETED_AT.to_string(), Utc::now().timestamp_millis().to_string()); external_id } Ok(Err(e)) => { @@ -300,7 +304,7 @@ pub async fn process_job(id: Uuid, config: Arc) -> Result<(), JobError> tracing::info!(log_type = "completed", category = "general", function_type = "process_job", block_no = %internal_id, "General process job completed for block"); let duration = start.elapsed(); - ORCHESTRATOR_METRICS.successful_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.successful_job_operations.add(1.0, &attributes); ORCHESTRATOR_METRICS.block_gauge.record(parse_string(&job.internal_id)?, &attributes); ORCHESTRATOR_METRICS.jobs_response_time.record(duration.as_secs_f64(), &attributes); Ok(()) @@ -341,25 +345,49 @@ pub async fn verify_job(id: Uuid, config: Arc) -> Result<(), JobError> { let job_handler = factory::get_job_handler(&job.job_type).await; tracing::debug!(job_id = ?id, "Verifying job with handler"); let verification_status = job_handler.verify_job(config.clone(), &mut job).await?; - tracing::Span::current().record("verification_status", format!("{:?}", verification_status.clone())); + tracing::Span::current().record("verification_status", format!("{:?}", &verification_status)); + + let mut attributes = vec![ + KeyValue::new("operation_job_type", format!("{:?}", 
job.job_type)), + KeyValue::new("operation_type", "verify_job"), + KeyValue::new("operation_verification_status", format!("{:?}", &verification_status)), + ]; + let mut operation_job_status: Option = None; match verification_status { JobVerificationStatus::Verified => { tracing::info!(job_id = ?id, "Job verified successfully"); + match job + .metadata + .get(JOB_METADATA_PROCESSING_COMPLETED_AT) + .and_then(|time| time.parse::().ok()) + .map(|start| Utc::now().timestamp_millis() - start) + { + Some(time_taken) => ORCHESTRATOR_METRICS + .verification_time + .record(time_taken as f64, &[KeyValue::new("operation_job_type", format!("{:?}", job.job_type))]), + None => tracing::warn!("Failed to calculate verification time: Invalid or missing processing time"), + } + let mut metadata = job.metadata.clone(); + metadata.remove("processing_completed_at"); config .database() - .update_job(&job, JobItemUpdates::new().update_status(JobStatus::Completed).build()) + .update_job( + &job, + JobItemUpdates::new().update_metadata(metadata).update_status(JobStatus::Completed).build(), + ) .await .map_err(|e| { tracing::error!(job_id = ?id, error = ?e, "Failed to update job status to Completed"); JobError::Other(OtherError(e)) })?; + operation_job_status = Some(JobStatus::Completed); } JobVerificationStatus::Rejected(e) => { tracing::warn!(job_id = ?id, error = ?e, "Job verification rejected"); - let mut new_job = job.clone(); - new_job.metadata.insert("error".to_string(), e); - new_job.status = JobStatus::VerificationFailed; + let mut new_job_metadata = job.metadata.clone(); + new_job_metadata.insert(JOB_METADATA_ERROR.to_string(), e); + operation_job_status = Some(JobStatus::VerificationFailed); let process_attempts = get_u64_from_metadata(&job.metadata, JOB_PROCESS_ATTEMPT_METADATA_KEY) .map_err(|e| JobError::Other(OtherError(e)))?; @@ -376,7 +404,7 @@ pub async fn verify_job(id: Uuid, config: Arc) -> Result<(), JobError> { &job, JobItemUpdates::new() 
.update_status(JobStatus::VerificationFailed) - .update_metadata(new_job.metadata) + .update_metadata(new_job_metadata) .build(), ) .await @@ -411,40 +439,42 @@ pub async fn verify_job(id: Uuid, config: Arc) -> Result<(), JobError> { tracing::error!(job_id = ?id, error = ?e, "Failed to update job status to VerificationTimeout"); JobError::Other(OtherError(e)) })?; - return Ok(()); - } - let metadata = increment_key_in_metadata(&job.metadata, JOB_VERIFICATION_ATTEMPT_METADATA_KEY)?; + operation_job_status = Some(JobStatus::VerificationTimeout); + } else { + let metadata = increment_key_in_metadata(&job.metadata, JOB_VERIFICATION_ATTEMPT_METADATA_KEY)?; - config.database().update_job(&job, JobItemUpdates::new().update_metadata(metadata).build()).await.map_err( - |e| { - tracing::error!(job_id = ?id, error = ?e, "Failed to update job metadata"); + config + .database() + .update_job(&job, JobItemUpdates::new().update_metadata(metadata).build()) + .await + .map_err(|e| { + tracing::error!(job_id = ?id, error = ?e, "Failed to update job metadata"); + JobError::Other(OtherError(e)) + })?; + + tracing::debug!(job_id = ?id, "Adding job back to verification queue"); + add_job_to_verification_queue( + job.id, + &job.job_type, + Duration::from_secs(job_handler.verification_polling_delay_seconds()), + config.clone(), + ) + .await + .map_err(|e| { + tracing::error!(job_id = ?id, error = ?e, "Failed to add job to verification queue"); JobError::Other(OtherError(e)) - }, - )?; - - tracing::debug!(job_id = ?id, "Adding job back to verification queue"); - add_job_to_verification_queue( - job.id, - &job.job_type, - Duration::from_secs(job_handler.verification_polling_delay_seconds()), - config.clone(), - ) - .await - .map_err(|e| { - tracing::error!(job_id = ?id, error = ?e, "Failed to add job to verification queue"); - JobError::Other(OtherError(e)) - })?; + })?; + } } }; - let attributes = [ - KeyValue::new("operation_job_type", format!("{:?}", job.job_type)), - 
KeyValue::new("operation_type", "verify_job"), - ]; + if let Some(job_status) = operation_job_status { + attributes.push(KeyValue::new("operation_job_status", format!("{}", job_status))); + } tracing::info!(log_type = "completed", category = "general", function_type = "verify_job", block_no = %internal_id, "General verify job completed for block"); let duration = start.elapsed(); - ORCHESTRATOR_METRICS.successful_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.successful_job_operations.add(1.0, &attributes); ORCHESTRATOR_METRICS.jobs_response_time.record(duration.as_secs_f64(), &attributes); ORCHESTRATOR_METRICS.block_gauge.record(parse_string(&job.internal_id)?, &attributes); Ok(()) @@ -491,6 +521,9 @@ async fn move_job_to_failed(job: &JobItem, config: Arc, reason: String) { Ok(_) => { tracing::info!(log_type = "completed", category = "general", function_type = "handle_job_failure", block_no = %internal_id, "General handle job failure completed for block"); + ORCHESTRATOR_METRICS + .failed_jobs + .add(1.0, &[KeyValue::new("operation_job_type", format!("{:?}", job.job_type))]); Ok(()) } Err(e) => { diff --git a/crates/orchestrator/src/jobs/proving_job/mod.rs b/crates/orchestrator/src/jobs/proving_job/mod.rs index 138d2734..a034bb4e 100644 --- a/crates/orchestrator/src/jobs/proving_job/mod.rs +++ b/crates/orchestrator/src/jobs/proving_job/mod.rs @@ -149,10 +149,10 @@ impl Job for ProvingJob { } fn max_verification_attempts(&self) -> u64 { - 1200 + 300 } fn verification_polling_delay_seconds(&self) -> u64 { - 30 + 300 } } diff --git a/crates/orchestrator/src/metrics.rs b/crates/orchestrator/src/metrics.rs index 67679d17..9fddfbe7 100644 --- a/crates/orchestrator/src/metrics.rs +++ b/crates/orchestrator/src/metrics.rs @@ -9,8 +9,10 @@ register_metric!(ORCHESTRATOR_METRICS, OrchestratorMetrics); pub struct OrchestratorMetrics { pub block_gauge: Gauge, - pub successful_jobs: Counter, + pub successful_job_operations: Counter, + pub failed_job_operations: Counter, pub 
failed_jobs: Counter, + pub verification_time: Gauge, pub jobs_response_time: Gauge, pub db_calls_response_time: Gauge, } @@ -35,10 +37,17 @@ impl Metrics for OrchestratorMetrics { "block".to_string(), ); - let successful_jobs = register_counter_metric_instrument( + let successful_job_operations = register_counter_metric_instrument( &orchestrator_meter, - "successful_jobs".to_string(), - "A counter to show count of successful jobs over time".to_string(), + "successful_job_operations".to_string(), + "A counter to show count of successful job operations over time".to_string(), + "jobs".to_string(), + ); + + let failed_job_operations = register_counter_metric_instrument( + &orchestrator_meter, + "failed_job_operations".to_string(), + "A counter to show count of failed job operations over time".to_string(), "jobs".to_string(), ); @@ -49,20 +58,35 @@ impl Metrics for OrchestratorMetrics { "jobs".to_string(), ); + let verification_time = register_gauge_metric_instrument( + &orchestrator_meter, + "verification_time".to_string(), + "A gauge to show the time taken for verification of tasks".to_string(), + "ms".to_string(), + ); + let jobs_response_time = register_gauge_metric_instrument( &orchestrator_meter, "jobs_response_time".to_string(), "A gauge to show response time of jobs over time".to_string(), - "Time".to_string(), + "s".to_string(), ); let db_calls_response_time = register_gauge_metric_instrument( &orchestrator_meter, "db_calls_response_time".to_string(), "A gauge to show response time of jobs over time".to_string(), - "Time".to_string(), + "s".to_string(), ); - Self { block_gauge, successful_jobs, failed_jobs, jobs_response_time, db_calls_response_time } + Self { + block_gauge, + successful_job_operations, + failed_job_operations, + failed_jobs, + verification_time, + jobs_response_time, + db_calls_response_time, + } } } diff --git a/crates/orchestrator/src/routes/job_routes.rs b/crates/orchestrator/src/routes/job_routes.rs index 7f3cd489..a10730af 100644 --- 
a/crates/orchestrator/src/routes/job_routes.rs +++ b/crates/orchestrator/src/routes/job_routes.rs @@ -43,8 +43,7 @@ async fn handle_process_job_request( ApiResponse::success(response).into_response() } Err(e) => { - let attributes = [KeyValue::new("operation_type", "process_job")]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &[KeyValue::new("operation_type", "process_job")]); ApiResponse::::error(e.to_string()).into_response() } } @@ -69,8 +68,7 @@ async fn handle_verify_job_request( ApiResponse::success(response).into_response() } Err(e) => { - let attributes = [KeyValue::new("operation_type", "verify_job")]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &[KeyValue::new("operation_type", "verify_job")]); ApiResponse::::error(e.to_string()).into_response() } } diff --git a/crates/orchestrator/src/workers/data_submission_worker.rs b/crates/orchestrator/src/workers/data_submission_worker.rs index bef5cc3f..24585435 100644 --- a/crates/orchestrator/src/workers/data_submission_worker.rs +++ b/crates/orchestrator/src/workers/data_submission_worker.rs @@ -34,7 +34,7 @@ impl Worker for DataSubmissionWorker { KeyValue::new("operation_job_type", format!("{:?}", JobType::DataSubmission)), KeyValue::new("operation_type", format!("{:?}", "create_job")), ]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &attributes); } } } diff --git a/crates/orchestrator/src/workers/mod.rs b/crates/orchestrator/src/workers/mod.rs index 1a909a8b..ec61659a 100644 --- a/crates/orchestrator/src/workers/mod.rs +++ b/crates/orchestrator/src/workers/mod.rs @@ -49,7 +49,7 @@ pub trait Worker: Send + Sync { async fn is_worker_enabled(&self, config: Arc) -> color_eyre::Result { let failed_jobs = config .database() - .get_jobs_by_statuses(vec![JobStatus::VerificationFailed, JobStatus::VerificationTimeout], 
Some(1)) + .get_jobs_by_statuses(vec![JobStatus::Failed, JobStatus::VerificationTimeout], Some(1)) .await?; if !failed_jobs.is_empty() { diff --git a/crates/orchestrator/src/workers/proving.rs b/crates/orchestrator/src/workers/proving.rs index 20b0df9d..66c52dfe 100644 --- a/crates/orchestrator/src/workers/proving.rs +++ b/crates/orchestrator/src/workers/proving.rs @@ -35,7 +35,7 @@ impl Worker for ProvingWorker { KeyValue::new("operation_job_type", format!("{:?}", JobType::ProofCreation)), KeyValue::new("operation_type", format!("{:?}", "create_job")), ]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &attributes); } } } diff --git a/crates/orchestrator/src/workers/snos.rs b/crates/orchestrator/src/workers/snos.rs index 552da715..5af863c7 100644 --- a/crates/orchestrator/src/workers/snos.rs +++ b/crates/orchestrator/src/workers/snos.rs @@ -56,7 +56,7 @@ impl Worker for SnosWorker { KeyValue::new("operation_job_type", format!("{:?}", JobType::SnosRun)), KeyValue::new("operation_type", format!("{:?}", "create_job")), ]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &attributes); } } } diff --git a/crates/orchestrator/src/workers/update_state.rs b/crates/orchestrator/src/workers/update_state.rs index dd989aba..a6b7da2d 100644 --- a/crates/orchestrator/src/workers/update_state.rs +++ b/crates/orchestrator/src/workers/update_state.rs @@ -125,7 +125,7 @@ impl Worker for UpdateStateWorker { KeyValue::new("operation_job_type", format!("{:?}", JobType::StateTransition)), KeyValue::new("operation_type", format!("{:?}", "create_job")), ]; - ORCHESTRATOR_METRICS.failed_jobs.add(1.0, &attributes); + ORCHESTRATOR_METRICS.failed_job_operations.add(1.0, &attributes); return Err(e.into()); } } diff --git a/crates/settlement-clients/ethereum/src/lib.rs b/crates/settlement-clients/ethereum/src/lib.rs index 55cc3fe9..ce620ae4 100644 --- 
a/crates/settlement-clients/ethereum/src/lib.rs +++ b/crates/settlement-clients/ethereum/src/lib.rs @@ -49,9 +49,9 @@ const Y_LOW_POINT_OFFSET: usize = 14; const Y_HIGH_POINT_OFFSET: usize = Y_LOW_POINT_OFFSET + 1; // Ethereum Transaction Finality -const MAX_TX_FINALISATION_ATTEMPTS: usize = 100; +const MAX_TX_FINALISATION_ATTEMPTS: usize = 30; const REQUIRED_BLOCK_CONFIRMATIONS: u64 = 3; -const TX_WAIT_SLEEP_DELAY_SECS: u64 = 5; +const TX_WAIT_SLEEP_DELAY_SECS: u64 = 60; lazy_static! { pub static ref PROJECT_ROOT: PathBuf = PathBuf::from(format!("{}/../../../", env!("CARGO_MANIFEST_DIR"))); diff --git a/dashboards/Grafana/dashboard_v1.json b/dashboards/Grafana/dashboard_v1.json new file mode 100644 index 00000000..a126e59e --- /dev/null +++ b/dashboards/Grafana/dashboard_v1.json @@ -0,0 +1,2100 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 1052, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 4, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + 
}, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"create_job\", operation_job_type=\"SnosRun\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - create job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"process_job\", operation_job_type=\"SnosRun\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - process job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"verify_job\", operation_job_type=\"SnosRun\", job=\"$meter_service\", operation_verification_status=\"Verified\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - verify job", + "useBackend": false + } + ], + "title": "Snos - latest block completed per job", + "transparent": true, + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 18, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + 
"wideLayout": true + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"create_job\", operation_job_type=\"ProofCreation\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - create job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"process_job\", operation_job_type=\"ProofCreation\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - process job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"verify_job\", operation_job_type=\"ProofCreation\", job=\"$meter_service\", operation_verification_status=\"Verified\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - verify job", + "useBackend": false + } + ], + "title": "ProofCreation - latest block completed per job", + "transparent": true, + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 19, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + 
"orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"create_job\", operation_job_type=\"DataSubmission\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - create job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"process_job\", operation_job_type=\"DataSubmission\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - process job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"verify_job\", operation_job_type=\"DataSubmission\", job=\"$meter_service\", operation_verification_status=\"Verified\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - verify job", + "useBackend": false + } + ], + "title": "DataSubmission - latest block completed per job", + "transparent": true, + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + } + ] + }, + "unitScale": true + }, + 
"overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 20, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.3.3", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"create_job\", operation_job_type=\"StateTransition\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - create job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "block_state{operation_type=\"process_job\", operation_job_type=\"StateTransition\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - process job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "block_state{operation_type=\"verify_job\", operation_job_type=\"StateTransition\", job=\"$meter_service\", operation_verification_status=\"Verified\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_type}}", + "range": true, + "refId": "Snos - verify job", + "useBackend": false + } + ], + "title": "StateTransition - latest block completed per job", + "transparent": true, + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + 
"fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 16 + }, + "id": 7, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "DA" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "Snos" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", 
+ "expr": "successful_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"StateTransition\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B" + } + ], + "title": "Successful Create Jobs over time (since last start)", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 16 + }, + "id": 9, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + 
}, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"create_job\", job=\"$meter_service\", operation_job_type=\"StateTransition\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "D" + } + ], + "title": "Failed Create Jobs over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + 
"hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 24 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "(successful_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\"})", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "(successful_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\"})", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "(successful_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\"})", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + 
"uid": "prometheus" + }, + "editorMode": "code", + "expr": "(successful_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"StateTransition\"})", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "D" + } + ], + "title": "Successful Process Jobs over time (since last start)", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 24 + }, + "id": 21, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\" }", + "hide": false, + 
"instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"process_job\", job=\"$meter_service\", operation_job_type=\"StateTransition\" }", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "D" + } + ], + "title": "Failed Process Jobs over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + 
"mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 32 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\", operation_verification_status=\"Verified\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "Snos" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\", operation_verification_status=\"Verified\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "ProofCreation" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\", operation_verification_status=\"Verified\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "DataSubmission" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"verify_job\", 
job=\"$meter_service\", operation_job_type=\"StateTransition\", operation_verification_status=\"Verified\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "StateTransition" + } + ], + "title": "Successful Verify Jobs over time (since last start)", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "description": "grouped by job_type", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 32 + }, + "id": 22, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"SnosRun\", operation_verification_status=\"Rejected\"}", + "hide": false, + "instant": false, + "legendFormat": 
"{{operation_job_type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"ProofCreation\", operation_verification_status=\"Rejected\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"DataSubmission\", operation_verification_status=\"Rejected\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "failed_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_type=\"StateTransition\", operation_verification_status=\"Rejected\"}", + "hide": false, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "D" + } + ], + "title": "Failed Verify Jobs over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + 
"spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s", + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 40 + }, + "id": 16, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "jobs_response_time_seconds{job=\"$meter_service\", operation_type=\"create_job\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Create Job response time over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + 
}, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s", + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 40 + }, + "id": 15, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "jobs_response_time_seconds{job=\"$meter_service\", operation_type=\"process_job\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Process Job response time over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s", + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 48 + }, + "id": 17, + "options": { + "legend": { + "calcs": [], 
+ "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "jobs_response_time_seconds{job=\"$meter_service\", operation_type=\"verify_job\", operation_verification_status=\"Verified\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Verify Job response time over time", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "fieldMinMax": false, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s", + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 48 + }, + "id": 14, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.3.3", 
+ "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"update_job\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "update_job", + "range": true, + "refId": "update_job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"create_job\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "create_job", + "range": true, + "refId": "create_job", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_job_by_id\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_job_by_id", + "range": true, + "refId": "get_job_by_id", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_job_by_internal_id_and_type\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_job_by_internal_id_and_type", + "range": true, + "refId": "get_job_by_internal_id_and_type", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, 
db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_latest_job_by_type\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_latest_job_by_type", + "range": true, + "refId": "get_latest_job_by_type", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_jobs_without_successor\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_jobs_without_successor", + "range": true, + "refId": "get_jobs_without_successor", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_latest_job_by_type_and_status\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_latest_job_by_type_and_status", + "range": true, + "refId": "get_latest_job_by_type_and_status", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_jobs_after_internal_id_by_job_type\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_jobs_after_internal_id_by_job_type", + "range": true, + "refId": "get_jobs_after_internal_id_by_job_type", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "quantile(0.99, 
db_calls_response_time_seconds{job=\"$meter_service\", db_operation_name=\"get_jobs_by_statuses\"})", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "get_jobs_by_statuses", + "range": true, + "refId": "get_jobs_by_statuses", + "useBackend": false + } + ], + "title": "Response time of DB queries (per method)", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "ms", + "unitScale": true + }, + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "ProofCreation" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 56 + }, + "id": 23, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + 
"targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "verification_time_milliseconds{operation_job_type=\"ProofCreation\", job=\"$meter_service\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Verification time for Proof Creation", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 56 + }, + "id": 24, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "failed_jobs_total{job=\"$meter_service\"}", + "fullMetaSearch": false, + 
"includeNullMetadata": true, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "successful_job_operations_jobs_total{operation_type=\"verify_job\", job=\"$meter_service\", operation_job_status=\"VerificationTimeout\"}", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "{{operation_job_type}}", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "Failed Jobs", + "transparent": true, + "type": "timeseries" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": { + "selected": true, + "text": [ + "madara_orchestrator_meter_service" + ], + "value": [ + "madara_orchestrator_meter_service" + ] + }, + "description": "Name of the meter service that points to madara orchestrator you want to read.", + "hide": 0, + "includeAll": false, + "multi": true, + "name": "meter_service", + "options": [ + { + "selected": true, + "text": "madara_orchestrator_meter_service", + "value": "madara_orchestrator_meter_service" + } + ], + "query": "madara_orchestrator_meter_service", + "queryValue": "", + "skipUrlSync": false, + "type": "custom" + } + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "browser", + "title": "Madara Orchestrator", + "uid": "fe607qql0bawwb", + "version": 24, + "weekStart": "" +} \ No newline at end of file diff --git a/dashboards/Signoz_Dashboard.json b/dashboards/Signoz/dashboard_v1.json similarity index 100% rename from dashboards/Signoz_Dashboard.json rename to dashboards/Signoz/dashboard_v1.json diff --git a/e2e-tests/src/node.rs b/e2e-tests/src/node.rs index 0603cb40..f1be9de2 100644 --- a/e2e-tests/src/node.rs +++ b/e2e-tests/src/node.rs @@ -75,7 +75,8 @@ impl Orchestrator 
{ command.stdout(Stdio::piped()).stderr(Stdio::piped()); } else { command.arg("--aws-event-bridge"); - + command.arg("--event-bridge-type"); + command.arg("rule"); // For setup mode, inherit the stdio to show output directly command.stdout(Stdio::inherit()).stderr(Stdio::inherit()); } diff --git a/e2e-tests/tests.rs b/e2e-tests/tests.rs index 4adac029..520a0ea1 100644 --- a/e2e-tests/tests.rs +++ b/e2e-tests/tests.rs @@ -4,8 +4,6 @@ use std::io::Read; use std::time::{Duration, Instant}; use aws_config::meta::region::RegionProviderChain; -use aws_sdk_eventbridge::types::{InputTransformer, RuleState, Target}; -use aws_sdk_sqs::types::QueueAttributeName; use chrono::{SubsecRound, Utc}; use e2e_tests::anvil::AnvilSetup; use e2e_tests::mock_server::MockResponseBodyType; @@ -15,7 +13,6 @@ use e2e_tests::utils::{get_mongo_db_client, read_state_update_from_file, vec_u8_ use e2e_tests::{MongoDbServer, Orchestrator}; use mongodb::bson::doc; use orchestrator::cli::database::DatabaseValidatedArgs; -use orchestrator::cron::{get_worker_trigger_message, WORKER_TRIGGERS}; use orchestrator::data_storage::DataStorage; use orchestrator::database::mongodb::MongoDBValidatedArgs; use orchestrator::jobs::constants::{JOB_METADATA_SNOS_BLOCK, JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY}; @@ -170,15 +167,6 @@ async fn test_orchestrator_workflow(#[case] l2_block_number: String) { println!("āœ… Orchestrator setup completed."); - let trigger_rule_name = &get_env_var_or_panic("MADARA_ORCHESTRATOR_EVENT_BRIDGE_TRIGGER_RULE_NAME"); - let target_queue_name = &get_env_var_or_panic("MADARA_ORCHESTRATOR_EVENT_BRIDGE_TARGET_QUEUE_NAME"); - - // Setup eventbridge rules - create_event_bridge_rule(trigger_rule_name, target_queue_name).await.expect( - "Unable to create - event bridge rule", - ); - // Run orchestrator let mut orchestrator = Orchestrator::new(OrchestratorMode::Run, setup_config.envs()).expect("Failed to start orchestrator"); @@ -244,58 +232,6 @@ async fn 
test_orchestrator_workflow(#[case] l2_block_number: String) { #[tokio::test] async fn test_orchestration_workflow_l3(#[case] _l3_block_number: String) {} -/// Function that adds rules to tests for localstack -/// This can be removed after https://github.com/localstack/localstack/issues/9861 is closed -async fn create_event_bridge_rule(trigger_rule_name: &String, target_queue_name: &String) -> color_eyre::Result<()> { - let aws_config = &aws_config::from_env().load().await; - - let queue_client = aws_sdk_sqs::Client::new(aws_config); - - let event_bridge_client = aws_sdk_eventbridge::Client::new(aws_config); - - let queue_url = queue_client.get_queue_url().queue_name(target_queue_name).send().await?; - - let queue_attributes = queue_client - .get_queue_attributes() - .queue_url(queue_url.queue_url.unwrap()) - .attribute_names(QueueAttributeName::QueueArn) - .send() - .await?; - let queue_arn = queue_attributes.attributes().unwrap().get(&QueueAttributeName::QueueArn).unwrap(); - - // Create the EventBridge target with the input transformer - - for trigger in WORKER_TRIGGERS.iter() { - let message = get_worker_trigger_message(trigger.clone())?; - let input_transformer = - InputTransformer::builder().input_paths_map("time", "$.time").input_template(message).build()?; - - let trigger_name = format!("{}-{}", trigger_rule_name, trigger); - event_bridge_client - .put_rule() - .name(trigger_name.clone()) - .schedule_expression("rate(1 minute)") - .state(RuleState::Enabled) - .send() - .await?; - - event_bridge_client - .put_targets() - .rule(trigger_name.clone()) - .targets( - Target::builder() - .id(uuid::Uuid::new_v4().to_string()) - .arn(queue_arn) - .input_transformer(input_transformer.clone()) - .build()?, - ) - .send() - .await?; - } - - Ok(()) -} - /// Function to check db for expected state continuously async fn wait_for_db_state( timeout: Duration, diff --git a/madara-bootstrapper b/madara-bootstrapper index b0b64750..f717bf17 160000 --- a/madara-bootstrapper +++ 
b/madara-bootstrapper @@ -1 +1 @@ -Subproject commit b0b647500c2ae3e3b0d99e345fa652989bca4726 +Subproject commit f717bf179581da53d68fee03b50ef78e0628ee20 diff --git a/scripts/artifacts/eth/MockGPSVerifier.sol b/scripts/artifacts/eth/MockGPSVerifier.sol new file mode 100644 index 00000000..a5ddb007 --- /dev/null +++ b/scripts/artifacts/eth/MockGPSVerifier.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +contract MockGPSVerifier { + // Returns true for any input fact hash + function isValid(bytes32) public pure returns (bool) { + return true; + } +} \ No newline at end of file diff --git a/scripts/dummy_contract_deployment.sh b/scripts/dummy_contract_deployment.sh new file mode 100755 index 00000000..2e3162f6 --- /dev/null +++ b/scripts/dummy_contract_deployment.sh @@ -0,0 +1,150 @@ +#!/bin/bash + +# Check if jq is installed +if ! command -v jq &> /dev/null; then + echo "Error: jq is required but not installed. Please install jq first." + exit 1 +fi + +# Check if curl is installed +if ! command -v curl &> /dev/null; then + echo "Error: curl is required but not installed. Please install curl first." + exit 1 +fi + +# Check if required arguments are provided +if [ -z "$1" ] || [ -z "$2" ]; then + echo "Usage: $0 " + echo "Example: $0 http://localhost:9944 66644" + exit 1 +fi + +# Read arguments +ABI_FILE='./e2e-tests/artifacts/contracts/Starknet.json' +RPC_URL=$1 +BLOCK_NUMBER=$2 + +# Default Anvil private key +PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" +ANVIL_URL="http://localhost:8545" + +echo -e "\nšŸ” Fetching state update for block $BLOCK_NUMBER..." 
+ +# Fetch state update from RPC with correct params structure +STATE_UPDATE=$(curl -s -X POST -H "Content-Type: application/json" --data "{ + \"jsonrpc\":\"2.0\", + \"method\":\"starknet_getStateUpdate\", + \"params\": { + \"block_id\": { + \"block_number\": $BLOCK_NUMBER + } + }, + \"id\":1 +}" "$RPC_URL") + +# Extract global root and block hash from the response +GLOBAL_ROOT=$(echo "$STATE_UPDATE" | jq -r '.result.new_root') +BLOCK_HASH=$(echo "$STATE_UPDATE" | jq -r '.result.block_hash') + +if [ "$GLOBAL_ROOT" == "null" ] || [ "$BLOCK_HASH" == "null" ]; then + echo "Error: Failed to fetch state update data" + echo "Response: $STATE_UPDATE" + exit 1 +fi + +echo -e "\nšŸ“Š State Update Data:" +echo " Global Root: $GLOBAL_ROOT" +echo " Block Hash: $BLOCK_HASH" +echo "" + +# Deploy the verifier contract using forge create +echo -e "šŸš€ Deploying verifier contract...\n" +VERIFIER_RESULT=$(forge create \ + --rpc-url "$ANVIL_URL" \ + --private-key "$PRIVATE_KEY" \ + "scripts/artifacts/eth/MockGPSVerifier.sol:MockGPSVerifier" \ + 2>&1) + +if [ $? -ne 0 ]; then + echo "Error deploying verifier contract:" + echo "$VERIFIER_RESULT" + exit 1 +fi + +# Extract contract address from forge create output +VERIFIER_ADDRESS=$(echo "$VERIFIER_RESULT" | grep "Deployed to" | awk '{print $3}') +echo -e "šŸ“¦ Verifier deployed at: $VERIFIER_ADDRESS\n" + +# Now deploy the main Starknet contract +echo -e "šŸš€ Deploying Starknet contract...\n" + +# Extract bytecode from the JSON file +BYTECODE=$(jq -r '.bytecode.object' "$ABI_FILE" | sed 's/^0x//') + +if [ "$BYTECODE" == "null" ] || [ -z "$BYTECODE" ]; then + echo "Error: No bytecode found in the JSON file" + exit 1 +fi + +# Deploy the contract using cast +RESULT=$(cast send \ + --private-key "$PRIVATE_KEY" \ + --rpc-url "$ANVIL_URL" \ + --create "0x$BYTECODE" \ + 2>&1) + +# Check if deployment was successful +if [ $? 
-eq 0 ]; then + # Extract contract address from the result using grep and awk + CONTRACT_ADDRESS=$(echo "$RESULT" | grep "contractAddress" | awk '{print $2}') + + if [ -n "$CONTRACT_ADDRESS" ]; then + echo -e "šŸ“¦ Starknet contract deployed successfully at: $CONTRACT_ADDRESS\n" + + # sleep for 2 seconds + sleep 2 + + # Initialize the contract with the required data + echo -e "šŸ”§ Initializing contract...\n" + + # Create the initialization data + PROGRAM_HASH="853638403225561750106379562222782223909906501242604214771127703946595519856" + AGGREGATOR_PROGRAM_HASH="0" + CONFIG_HASH="1773546093672122186726825451867439478968296982619761985456743675021283370179" + + # Encode the initialization data + INIT_DATA=$(cast abi-encode "f(uint256,uint256,address,uint256,uint256,int256,uint256)" \ + $PROGRAM_HASH \ + $AGGREGATOR_PROGRAM_HASH \ + $VERIFIER_ADDRESS \ + $CONFIG_HASH \ + $GLOBAL_ROOT \ + $BLOCK_NUMBER \ + $BLOCK_HASH) + + # Call initializeContractState + INIT_RESULT=$(cast send \ + --private-key "$PRIVATE_KEY" \ + --rpc-url "$ANVIL_URL" \ + $CONTRACT_ADDRESS \ + "initializeContractState(bytes)" \ + $INIT_DATA) + + if [ $? -eq 0 ]; then + TX_HASH=$(echo "$INIT_RESULT" | grep "transactionHash" | awk '{print $2}') + echo -e "āœ… Contract initialized successfully!" 
+ echo -e " Transaction: $TX_HASH\n" + else + echo -e "āŒ Error initializing contract\n" + echo "$INIT_RESULT" + exit 1 + fi + else + echo "āŒ Error: Could not extract contract address from output" + exit 1 + fi +else + echo "āŒ Error deploying contract:" + echo "$RESULT" + exit 1 +fi \ No newline at end of file diff --git a/scripts/init_state.js b/scripts/init_state.js index 0c041972..474e39ab 100644 --- a/scripts/init_state.js +++ b/scripts/init_state.js @@ -18,7 +18,7 @@ const MADARA_ORCHESTRATOR_ETHEREUM_PRIVATE_KEY = const eth_provider = new ethers.JsonRpcProvider("http://localhost:8545"); const wallet = new ethers.Wallet( MADARA_ORCHESTRATOR_ETHEREUM_PRIVATE_KEY, - eth_provider, + eth_provider ); const starknet_provider = new starknet.RpcProvider({ @@ -56,7 +56,7 @@ async function getAppChainBalance(address) { const ethContract = new starknet.Contract( abi, ETHEREUM_APP_CHAIN_ADDRESS, - starknet_provider, + starknet_provider ); // Interaction with the contract with call @@ -69,16 +69,16 @@ async function bridgeToChain(bridge_address, starnet_expected_account_address) { const contract = new ethers.Contract( bridge_address, ["function deposit(uint256, uint256)"], - wallet, + wallet ); const initial_app_chain_balance = await getAppChainBalance( - starnet_expected_account_address, + starnet_expected_account_address ); const tx = await contract.deposit( ethers.parseEther("1"), starnet_expected_account_address, - { value: ethers.parseEther("1.01") }, + { value: ethers.parseEther("1.01") } ); tx.wait(); @@ -88,13 +88,13 @@ async function bridgeToChain(bridge_address, starnet_expected_account_address) { let counter = 10; while (counter--) { const final_app_chain_balance = await getAppChainBalance( - starnet_expected_account_address, + starnet_expected_account_address ); if (final_app_chain_balance > initial_app_chain_balance) { console.log( "šŸ’° App chain balance:", (final_app_chain_balance / 10n ** 18n).toString(), - "ETH", + "ETH" ); return; } @@ -121,7 +121,7 
@@ function calculatePrefactualAccountAddress() { starkKeyPub, OZ_ACCOUNT_CLASS_HASH, OZaccountConstructorCallData, - 0, + 0 ); return { address: OZcontractAddress, @@ -172,14 +172,14 @@ async function validateBlockPassesSnosChecks(block_number) { async function deployStarknetAccount( starknet_private_key, starnet_expected_account_address, - starknet_account_public_key, + starknet_account_public_key ) { console.log("ā³ Deploying Starknet account..."); const account = new starknet.Account( starknet_provider, starnet_expected_account_address, starknet_private_key, - "1", + "1" ); const { transaction_hash, contract_address } = await account.deployAccount({ classHash: OZ_ACCOUNT_CLASS_HASH, @@ -211,7 +211,7 @@ async function waitForTransactionSuccess(hash) { // can run SNOS async function overrideStateOnCoreContract( block_number, - core_contract_address, + core_contract_address ) { let state_update = await starknet_provider.getStateUpdate(block_number); let abi = [ @@ -244,7 +244,7 @@ async function overrideStateOnCoreContract( const tx = await contract.updateStateOverride( state_update.new_root, block_number, - state_update.block_hash, + state_update.block_hash ); const receipt = await tx.wait(); if (!receipt.status) { @@ -296,13 +296,13 @@ async function setupMongoDb(block_number) { async function transfer( starknet_account_private_key, - starnet_expected_account_address, + starnet_expected_account_address ) { const account = new starknet.Account( starknet_provider, starnet_expected_account_address, starknet_account_private_key, - "1", + "1" ); const abi = [ { @@ -346,7 +346,7 @@ async function transfer( const contract = new starknet.Contract( abi, ETHEREUM_APP_CHAIN_ADDRESS, - starknet_provider, + starknet_provider ); let calldata = contract.populate("transfer", { recipient: "0x1234", @@ -361,7 +361,7 @@ async function transfer( txn_hash.transaction_hash, { retryInterval: 100, - }, + } ); if (!receipt.isSuccess()) { console.log("āŒ Failed to do a transfer on 
Starknet account"); @@ -371,7 +371,7 @@ async function transfer( // if txn is pending, block_number won't be available while (!receipt.block_number) { receipt = await starknet_provider.getTransactionReceipt( - txn_hash.transaction_hash, + txn_hash.transaction_hash ); await new Promise((resolve) => setTimeout(resolve, 200)); } @@ -387,19 +387,19 @@ async function transfer( async function upgradeETHToken( l2_eth_token_address, starknet_account_private_key, - starnet_expected_account_address, + starnet_expected_account_address ) { const account = new starknet.Account( starknet_provider, starnet_expected_account_address, starknet_account_private_key, - "1", + "1" ); // declare and deploy the new ERC20 contract // https://sepolia.starkscan.co/tx/0x04b5fa2a2e738a8b7a6c7b15194fbcf4409411743ebbe48cc5b83e5fe0edffdf console.log( - "ā„¹ļø Sending transaction to declare and deploy new ERC20 contract for ETH...", + "ā„¹ļø Sending transaction to declare and deploy new ERC20 contract for ETH..." ); let new_erc20_declare_deploy = await account.declareAndDeploy({ contract: require("./artifacts/starknet/new_eth_token.sierra.json"), @@ -433,12 +433,12 @@ async function upgradeETHToken( // add_implementation to bridge contarct before we upgrade // https://sepolia.starkscan.co/tx/0x064ab87819a2f8ebf91176eeb901f842c23ef6c97c107fe31b14defa352ba045 console.log( - "ā„¹ļø Sending transaction to add implementation to bridge contract...", + "ā„¹ļø Sending transaction to add implementation to bridge contract..." 
); let eth_bridge = new starknet.Contract( require("./artifacts/starknet/bridge_proxy_legacy.json").abi, l2_eth_token_address, - account, + account ); let add_implementation_calldata = eth_bridge.populate("add_implementation", [ new_erc20_declare_deploy.deploy.address, @@ -447,7 +447,7 @@ async function upgradeETHToken( 0, // final ]); let add_implementation_txn_hash = await eth_bridge.add_implementation( - add_implementation_calldata.calldata, + add_implementation_calldata.calldata ); await waitForTransactionSuccess(add_implementation_txn_hash.transaction_hash); console.log("āœ… Transaction successful."); @@ -457,7 +457,7 @@ async function upgradeETHToken( console.log("ā„¹ļø Sending transaction to upgrade ETH token contract..."); let upgrade_txn_hash = await eth_bridge.upgrade_to( // the calldata is the same - add_implementation_calldata.calldata, + add_implementation_calldata.calldata ); await waitForTransactionSuccess(upgrade_txn_hash.transaction_hash); console.log("āœ… Transaction successful."); @@ -465,7 +465,7 @@ async function upgradeETHToken( // now add a new implementation to the bridge contract for the erc20 class hash // https://sepolia.starkscan.co/tx/0x051cc24816ec349c601bbd4e9afc8e0a8c7a93061aba372045bbf7e5d35aff7a console.log( - "ā„¹ļø Sending transaction to add new implementation to bridge contract...", + "ā„¹ļø Sending transaction to add new implementation to bridge contract..." ); let add_new_implementation_txn_hash = await account.execute([ { @@ -480,13 +480,13 @@ async function upgradeETHToken( }, ]); await waitForTransactionSuccess( - add_new_implementation_txn_hash.transaction_hash, + add_new_implementation_txn_hash.transaction_hash ); console.log("āœ… Transaction successful."); // finally replace the class hash on the ETH contract console.log( - "ā„¹ļø Sending transaction to replace class hash on the ETH contract...", + "ā„¹ļø Sending transaction to replace class hash on the ETH contract..." 
); let replace_to_txn_hash = await account.execute([ { @@ -512,19 +512,19 @@ async function upgradeETHToken( async function upgradeETHBridge( l2_eth_bridge_address, starknet_account_private_key, - starnet_expected_account_address, + starnet_expected_account_address ) { const account = new starknet.Account( starknet_provider, starnet_expected_account_address, starknet_account_private_key, - "1", + "1" ); // declare and deploy the new ETH bridge contract // https://sepolia.starkscan.co/tx/0x05c266b9069c04f68752f5eb9652d7c0cd130c6d152d2267a8480273ec991de6 console.log( - "ā„¹ļø Sending transaction to declare and deploy new ETH bridge contract for ETH...", + "ā„¹ļø Sending transaction to declare and deploy new ETH bridge contract for ETH..." ); let new_bridge_declare_deploy = await account.declareAndDeploy({ contract: require("./artifacts/starknet/new_eth_bridge.sierra.json"), @@ -548,12 +548,12 @@ async function upgradeETHBridge( // add_implementation to bridge contarct before we upgrade // https://sepolia.starkscan.co/call/0x0721b02e1f4daa98ed8928966d66f345cb897f382274b22c89d86c00e755106d_1_1 console.log( - "ā„¹ļø Sending transaction to add implementation to bridge contract...", + "ā„¹ļø Sending transaction to add implementation to bridge contract..." 
); let eth_bridge = new starknet.Contract( require("./artifacts/starknet/bridge_proxy_legacy.json").abi, l2_eth_bridge_address, - account, + account ); let add_implementation_calldata = eth_bridge.populate("add_implementation", [ new_bridge_declare_deploy.deploy.address, @@ -565,7 +565,7 @@ async function upgradeETHBridge( 0, // final ]); let add_implementation_txn_hash = await eth_bridge.add_implementation( - add_implementation_calldata.calldata, + add_implementation_calldata.calldata ); await waitForTransactionSuccess(add_implementation_txn_hash.transaction_hash); console.log("āœ… Transaction successful."); @@ -575,7 +575,7 @@ async function upgradeETHBridge( console.log("ā„¹ļø Sending transaction to upgrade ETH bridge contract..."); let upgrade_txn_hash = await eth_bridge.upgrade_to( // the calldata is the same - add_implementation_calldata.calldata, + add_implementation_calldata.calldata ); await waitForTransactionSuccess(upgrade_txn_hash.transaction_hash); console.log("āœ… Transaction successful."); @@ -583,7 +583,7 @@ async function upgradeETHBridge( // now add a new implementation to the bridge contract for the bridge class hash // https://sepolia.starkscan.co/tx/0x051cc24816ec349c601bbd4e9afc8e0a8c7a93061aba372045bbf7e5d35aff7a console.log( - "ā„¹ļø Sending transaction to add new implementation to bridge contract...", + "ā„¹ļø Sending transaction to add new implementation to bridge contract..." ); let add_new_implementation_txn_hash = await account.execute([ { @@ -598,13 +598,13 @@ async function upgradeETHBridge( }, ]); await waitForTransactionSuccess( - add_new_implementation_txn_hash.transaction_hash, + add_new_implementation_txn_hash.transaction_hash ); console.log("āœ… Transaction successful."); // finally replace the class hash on the ETH contract console.log( - "ā„¹ļø Sending transaction to replace class hash on the ETH contract...", + "ā„¹ļø Sending transaction to replace class hash on the ETH contract..." 
); let replace_to_txn_hash = await account.execute([ { @@ -623,7 +623,7 @@ async function upgradeL1EthBridge(l1_bridge_address) { const contract = new ethers.ContractFactory( newEthBridge.abi, newEthBridge.bytecode, - wallet, + wallet ); const ethBridgeReceipt = await contract.deploy(); await ethBridgeReceipt.waitForDeployment(); @@ -635,7 +635,7 @@ async function upgradeL1EthBridge(l1_bridge_address) { const eicContract = new ethers.ContractFactory( newEic.abi, newEic.bytecode, - wallet, + wallet ); const eicReceipt = await eicContract.deploy(); await eicReceipt.waitForDeployment(); @@ -693,14 +693,14 @@ async function upgradeL1EthBridge(l1_bridge_address) { stateMutability: "payable", }, ], - wallet, + wallet ); // add new implementation to the bridge let addImplementationTxn = await bridge.addImplementation( ethBridgeAddress, chainHexesToBytes([eicAddress, "0x0", "0x0"]), - false, + false ); await addImplementationTxn.wait(); console.log("āœ… New implementation added to the bridge"); @@ -709,7 +709,7 @@ async function upgradeL1EthBridge(l1_bridge_address) { let upgradeToTxn = await bridge.upgradeTo( ethBridgeAddress, chainHexesToBytes([eicAddress, "0x0", "0x0"]), - false, + false ); await upgradeToTxn.wait(); console.log("āœ… Bridge upgraded to the new implementation"); @@ -763,8 +763,9 @@ async function main() { const bootstrapper_private_key = "0xabcd" || process.argv[7]; // add funds to boostrapper account - let bootstrapper_address_balance = - await getAppChainBalance(bootstrapper_address); + let bootstrapper_address_balance = await getAppChainBalance( + bootstrapper_address + ); if (bootstrapper_address_balance < 10n ** 17n) { await bridgeToChain(l1_bridge_address, bootstrapper_address); } else { @@ -772,21 +773,22 @@ async function main() { } // upgrade ETH token to Cairo 1 as SNOS breaks otherwise - const eth_token_class = - await starknet_provider.getClassAt(l2_eth_token_address); + const eth_token_class = await starknet_provider.getClassAt( + 
l2_eth_token_address + ); if (eth_token_class.sierra_program) { console.log("ā„¹ļø Eth token is already upgraded, proceeding"); } else { await upgradeETHToken( l2_eth_token_address, bootstrapper_private_key, - bootstrapper_address, + bootstrapper_address ); } // upgrade ETH bridge to Cairo 1 as well const l2_eth_bridge_class = await starknet_provider.getClassAt( - l2_eth_bridge_address, + l2_eth_bridge_address ); if (l2_eth_bridge_class.sierra_program) { console.log("ā„¹ļø Eth bridge is already upgraded, proceeding"); @@ -794,7 +796,7 @@ async function main() { await upgradeETHBridge( l2_eth_bridge_address, bootstrapper_private_key, - bootstrapper_address, + bootstrapper_address ); } @@ -802,14 +804,14 @@ async function main() { const l1BridgeContract = new ethers.Contract( l1_bridge_address, ["function identify() external view returns (string)"], - eth_provider, + eth_provider ); const identify = await l1BridgeContract.identify(); console.log("ā„¹ļø L1 ETH bridge identify:", identify); if ( identify.includes( // StarkWare_StarknetEthBridge_2023_1 - "StarkWare_StarknetEthBridge_2023_1", + "StarkWare_StarknetEthBridge_2023_1" ) ) { await upgradeL1EthBridge(l1_bridge_address); @@ -824,7 +826,7 @@ async function main() { } = calculatePrefactualAccountAddress(); console.log( "šŸ¦ Starknet expected account address:", - starnet_expected_account_address, + starnet_expected_account_address ); await bridgeToChain(l1_bridge_address, starnet_expected_account_address); @@ -832,7 +834,7 @@ async function main() { let block_number = await deployStarknetAccount( starknet_account_private_key, starnet_expected_account_address, - starknet_account_public_key, + starknet_account_public_key ); // SNOS doesn't seem to be able to run on deploy account block @@ -840,7 +842,7 @@ async function main() { block_number = await transfer( starknet_account_private_key, - starnet_expected_account_address, + starnet_expected_account_address ); await validateBlockPassesSnosChecks(block_number);