From 55c068d33aadf2783734d6653fca0fa0bad10511 Mon Sep 17 00:00:00 2001
From: Anton Baliasnikov
Date: Fri, 27 Oct 2023 10:37:10 +0100
Subject: [PATCH 1/2] chore: remove unused iris module (#769)

Signed-off-by: Anton Baliasnikov
---
 .github/labeler.yml                           |   2 -
 .github/workflows/deployment.yml              |   2 +-
 build.sbt                                     |  15 --
 .../ConnectionRepositorySpecSuite.scala       |   1 -
 infrastructure/local/update_env.sh            |   2 -
 infrastructure/multi/.env                     |   1 -
 infrastructure/shared/docker-compose-demo.yml |   4 +-
 .../shared/docker-compose-mt-keycloak.yml     |   6 +-
 infrastructure/shared/docker-compose.yml      |   6 +-
 .../docker-compose.yml                        |   6 -
 iris/api/grpc/README.md                       |   9 -
 iris/api/grpc/protocol/did_operations.proto   |  96 ----------
 iris/api/grpc/protocol/dlt.proto              |  37 ----
 iris/api/grpc/protocol/vc_operations.proto    |  19 --
 iris/api/grpc/service.proto                   |  59 ------
 iris/client/scala-client/CHANGELOG.md         |   0
 iris/client/scala-client/README.md            |   8 -
 iris/service/CHANGELOG.md                     |  30 ---
 iris/service/README.md                        |  10 -
 .../iris/core/model/ConfirmedBlock.scala      |  12 --
 .../iris/core/model/ConfirmedIrisBatch.scala  |  14 --
 .../iohk/atala/iris/core/model/Models.scala   |   7 -
 .../atala/iris/core/model/ledger/Block.scala  |  16 --
 .../iris/core/model/ledger/BlockError.scala   |  10 -
 .../iris/core/model/ledger/BlockHash.scala    |  18 --
 .../iris/core/model/ledger/BlockHeader.scala  |  10 -
 .../atala/iris/core/model/ledger/Funds.scala  |   3 -
 .../atala/iris/core/model/ledger/Ledger.scala |  12 --
 .../iris/core/model/ledger/Transaction.scala  |   8 -
 .../model/ledger/TransactionDetails.scala     |   3 -
 .../core/model/ledger/TransactionId.scala     |  20 --
 .../model/ledger/TransactionMetadata.scala    | 148 ---------------
 .../core/model/ledger/TransactionStatus.scala |  16 --
 .../core/repository/BlocksRepository.scala    |   9 -
 .../repository/DbRepositoryTransactor.scala   |  11 --
 .../repository/IrisBatchesRepository.scala    |  24 ---
 .../core/repository/KeyValueRepository.scala  |  10 -
 .../repository/OperationsRepository.scala     |  10 -
 .../iris/core/service/BlocksSaveSinker.scala  |  61 ------
 .../iris/core/service/BlocksStreamer.scala    | 155 ---------------
 .../InmemoryUnderlyingLedgerService.scala     | 166 ----------------
 .../iris/core/service/PublishingService.scala |  17 --
 .../service/UnderlyingLedgerService.scala     |  20 --
 .../core/worker/PublishingScheduler.scala     |  16 --
 .../mock/DummyDbRepositoryTransactor.scala    |  12 --
 .../mock/InMemoryIrisBatchesRepository.scala  |  28 ---
 .../mock/InMemoryKeyValueRepository.scala     |  21 --
 .../core/service/BlockchainSyncSpec.scala     | 118 ------------
 .../InmemoryUnderlyingLedgerServiceSpec.scala | 179 ------------------
 .../core/testutils/PublishThenAdjust.scala    |  19 --
 .../iris/core/testutils/RandomUtils.scala     |  56 ------
 iris/service/docker/docker-compose-local.yaml |  34 ----
 .../migrations/sql/V1__init_tables.sql        |   3 -
 .../src/main/resources/application.conf       |  14 --
 .../io/iohk/atala/iris/server/Main.scala      |   8 -
 .../io/iohk/atala/iris/server/Modules.scala   | 109 -----------
 .../atala/iris/server/config/AppConfig.scala  |  16 --
 .../atala/iris/server/grpc/GrpcServer.scala   |  38 ----
 .../atala/iris/server/grpc/GrpcServices.scala |  15 --
 .../grpc/service/IrisServiceGrpcImpl.scala    | 101 ----------
 .../sql/repository/JdbcBlocksRepository.scala |  17 --
 .../JdbcDbRepositoryTransactorIO.scala        |  16 --
 .../repository/JdbcIrisBatchRepository.scala  |  35 ----
 .../repository/JdbcKeyValueRepository.scala   |  33 ----
 .../repository/JdbcOperationsRepository.scala |  30 ---
 .../iris/sql/repository/TransactorLayer.scala |  63 ------
 .../atala/iris/sql/repository/package.scala   |  12 --
 .../core/model/PublishedBatchData.scala       |  10 -
 .../model/error/CredentialServiceError.scala  |   1 -
 .../core/service/MockCredentialService.scala  |   4 +-
 .../service/CredentialServiceSpecHelper.scala |   5 -
 .../src/main/resources/application.conf       |   9 -
 .../io/iohk/atala/agent/server/Modules.scala  |  17 --
 .../atala/agent/server/config/AppConfig.scala |   3 -
 .../issue/controller/IssueController.scala    |   2 -
 .../controller/IssueControllerSpec.scala      |   6 -
 .../controller/IssueControllerTestTools.scala |   7 -
 77 files changed, 8 insertions(+), 2142 deletions(-)
 delete mode 100644 iris/api/grpc/README.md
 delete mode 100644 iris/api/grpc/protocol/did_operations.proto
 delete mode 100644 iris/api/grpc/protocol/dlt.proto
 delete mode 100644 iris/api/grpc/protocol/vc_operations.proto
 delete mode 100644 iris/api/grpc/service.proto
 delete mode 100644 iris/client/scala-client/CHANGELOG.md
 delete mode 100644 iris/client/scala-client/README.md
 delete mode 100644 iris/service/CHANGELOG.md
 delete mode 100644 iris/service/README.md
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala
 delete mode 100644 iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala
 delete mode 100644 iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala
 delete mode 100644 iris/service/docker/docker-compose-local.yaml
 delete mode 100644 iris/service/migrations/sql/V1__init_tables.sql
 delete mode 100644 iris/service/server/src/main/resources/application.conf
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala
 delete mode 100644 iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala
 delete mode 100644 iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala
 delete mode 100644 pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala

diff --git a/.github/labeler.yml b/.github/labeler.yml
index e5b2b0b4cd..847b789f24 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -6,8 +6,6 @@ connect:
   - connect/**/*
 pollux:
   - pollux/**/*
-iris:
-  - iris/**/*
 prism-agent:
   - prism-agent/**/*
 prism-node:
diff --git a/.github/workflows/deployment.yml b/.github/workflows/deployment.yml
index 3980caf026..ec53127b4c 100644
--- a/.github/workflows/deployment.yml
+++ b/.github/workflows/deployment.yml
@@ -15,7 +15,7 @@ on:
   workflow_dispatch:
     inputs:
       component-tag:
-        description: "Tag of a component to trigger the update, e.g. iris-service-v0.5.0"
+        description: "Tag of a component to trigger the update"
         required: true
       env:
         description: "Environment to trigger update on"
diff --git a/build.sbt b/build.sbt
index 0f04fe823d..59ef51fd9e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -616,20 +616,6 @@ val prismNodeClient = project
     )
   )
 
-// ##############
-// ###  iris ####
-// ##############
-val irisClient = project
-  .in(file("iris/client/scala-client"))
-  .settings(
-    name := "iris-client",
-    libraryDependencies ++= Seq(D.scalaPbGrpc, D.scalaPbRuntime),
-    coverageEnabled := false,
-    // gRPC settings
-    Compile / PB.targets := Seq(scalapb.gen() -> (Compile / sourceManaged).value / "scalapb"),
-    Compile / PB.protoSources := Seq(baseDirectory.value / ".." / ".." / "api" / "grpc")
-  )
-
 // #####################
 // #####  castor  ######
 // #####################
@@ -677,7 +663,6 @@ lazy val polluxCore = project
     libraryDependencies ++= D_Pollux.coreDependencies
   )
   .dependsOn(shared)
-  .dependsOn(irisClient)
   .dependsOn(prismAgentWalletAPI)
   .dependsOn(polluxVcJWT)
   .dependsOn(vc, resolver, agentDidcommx, eventNotification, polluxAnoncreds)
diff --git a/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala b/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala
index 0955c19297..d65f963227 100644
--- a/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala
+++ b/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala
@@ -8,7 +8,6 @@ import io.iohk.atala.mercury.protocol.connection.{ConnectionRequest, ConnectionR
 import io.iohk.atala.mercury.protocol.invitation.v2.Invitation
 import io.iohk.atala.shared.models.{WalletAccessContext, WalletId}
 import zio.test.*
-import zio.test.Assertion.*
 import zio.{Cause, Exit, ZIO, ZLayer}
 
 import java.time.Instant
diff --git a/infrastructure/local/update_env.sh b/infrastructure/local/update_env.sh
index 50a308f4b1..a8831c9e51 100755
--- a/infrastructure/local/update_env.sh
+++ b/infrastructure/local/update_env.sh
@@ -5,8 +5,6 @@ ENV_FILE="${SCRIPT_DIR}/.env"
 
 pip install ${SCRIPT_DIR}/../utils/python/github-helpers > /dev/null 2>&1
 
-IRIS_SERVICE_VERSION=$(github get-latest-package-version --package iris-service --package-type container)
 PRISM_AGENT_VERSION=$(github get-latest-package-version --package prism-agent --package-type container)
 
-sed -i.bak "s/IRIS_SERVICE_VERSION=.*/IRIS_SERVICE_VERSION=${IRIS_SERVICE_VERSION}/" ${ENV_FILE} && rm -f ${ENV_FILE}.bak
 sed -i.bak "s/PRISM_AGENT_VERSION=.*/PRISM_AGENT_VERSION=${PRISM_AGENT_VERSION}/" ${ENV_FILE} && rm -f ${ENV_FILE}.bak
diff --git a/infrastructure/multi/.env b/infrastructure/multi/.env
index f4acdb3a67..a6c57a3d91 100644
--- a/infrastructure/multi/.env
+++ b/infrastructure/multi/.env
@@ -1,3 +1,2 @@
 MERCURY_MEDIATOR_VERSION=0.2.0
-IRIS_SERVICE_VERSION=0.1.0
 PRISM_AGENT_VERSION=0.6.0
diff --git a/infrastructure/shared/docker-compose-demo.yml b/infrastructure/shared/docker-compose-demo.yml
index 0fc0461c3e..a6ab6e0625 100644
--- a/infrastructure/shared/docker-compose-demo.yml
+++ b/infrastructure/shared/docker-compose-demo.yml
@@ -6,7 +6,7 @@ services:
   db:
     image: postgres:13
     environment:
-      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db"
+      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db"
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
     volumes:
@@ -14,7 +14,7 @@
       - ./postgres/init-script.sh:/docker-entrypoint-initdb.d/init-script.sh
       - ./postgres/max_conns.sql:/docker-entrypoint-initdb.d/max_conns.sql
     healthcheck:
-      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"]
+      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"]
       interval: 10s
       timeout: 5s
       retries: 5
diff --git a/infrastructure/shared/docker-compose-mt-keycloak.yml b/infrastructure/shared/docker-compose-mt-keycloak.yml
index da6033d048..c6af5eb386 100644
--- a/infrastructure/shared/docker-compose-mt-keycloak.yml
+++ b/infrastructure/shared/docker-compose-mt-keycloak.yml
@@ -10,7 +10,7 @@ services:
   db:
     image: postgres:13
     environment:
-      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db"
+      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db"
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
     volumes:
@@ -20,7 +20,7 @@
 #    ports:
 #      - "5432:5432"
     healthcheck:
-      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"]
+      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"]
       interval: 10s
       timeout: 5s
       retries: 5
@@ -72,8 +72,6 @@
   prism-agent:
     image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION}
     environment:
-      IRIS_HOST: iris
-      IRIS_PORT: 8081
       CASTOR_DB_HOST: db
       CASTOR_DB_PORT: 5432
       CASTOR_DB_NAME: castor
diff --git a/infrastructure/shared/docker-compose.yml b/infrastructure/shared/docker-compose.yml
index 8b626ea58c..600bde8519 100644
--- a/infrastructure/shared/docker-compose.yml
+++ b/infrastructure/shared/docker-compose.yml
@@ -8,7 +8,7 @@ services:
   db:
     image: postgres:13
     environment:
-      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db"
+      POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db"
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
     volumes:
@@ -18,7 +18,7 @@
     ports:
       - "${PG_PORT:-5432}:5432"
     healthcheck:
-      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"]
+      test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"]
       interval: 10s
       timeout: 5s
       retries: 5
@@ -70,8 +70,6 @@
   prism-agent:
     image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION}
     environment:
-      IRIS_HOST: iris
-      IRIS_PORT: 8081
       CASTOR_DB_HOST: db
       CASTOR_DB_PORT: 5432
       CASTOR_DB_NAME: castor
diff --git a/infrastructure/single-tenant-testing-stack/docker-compose.yml b/infrastructure/single-tenant-testing-stack/docker-compose.yml
index b5ca092a70..410c68076f 100644
--- a/infrastructure/single-tenant-testing-stack/docker-compose.yml
+++ b/infrastructure/single-tenant-testing-stack/docker-compose.yml
@@ -87,8 +87,6 @@
   issuer-oea:
     image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION}
     environment:
-      IRIS_HOST: iris
-      IRIS_PORT: 8081
       CASTOR_DB_HOST: issuer-db
       CASTOR_DB_PORT: 5432
       CASTOR_DB_NAME: castor
@@ -152,8 +150,6 @@
   verifier-oea:
     image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION}
     environment:
-      IRIS_HOST: iris
-      IRIS_PORT: 8081
       CASTOR_DB_HOST: verifier-db
       CASTOR_DB_PORT: 5432
       CASTOR_DB_NAME: castor
@@ -217,8 +213,6 @@
   holder-oea:
     image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION}
     environment:
-      IRIS_HOST: iris
-      IRIS_PORT: 8081
       CASTOR_DB_HOST: holder-db
       CASTOR_DB_PORT: 5432
       CASTOR_DB_NAME: castor
diff --git a/iris/api/grpc/README.md b/iris/api/grpc/README.md
deleted file mode 100644
index 47abfff63c..0000000000
--- a/iris/api/grpc/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-## gRPC API ##
-
-We use gRPC messages both for interactions with Iris and for serialising the messages that reside in the DLT.
-
-The `protocol` folder contains only the definitions that are posted to the DLT; these define the low-level
-protocol operations.
-
-Files outside the `protocol` folder describe the messages and services exposed in the gRPC interface of Iris,
-using the protocol messages in their definitions.
diff --git a/iris/api/grpc/protocol/did_operations.proto b/iris/api/grpc/protocol/did_operations.proto
deleted file mode 100644
index 4a6dd89301..0000000000
--- a/iris/api/grpc/protocol/did_operations.proto
+++ /dev/null
@@ -1,96 +0,0 @@
-syntax = "proto3";
-
-import "scalapb/scalapb.proto";
-
-option (scalapb.options) = {
-  no_default_values_in_constructor: true
-  package_name: "io.iohk.atala.iris.proto"
-};
-
-message PublicKeyJwk {
-  enum Curve {
-    SECP256K1 = 0;
-  }
-  message ECKeyData {
-    Curve curve = 1; // The curve name, like secp256k1.
-    bytes x = 2; // The x coordinate, represented as bytes.
-    bytes y = 3; // The y coordinate, represented as bytes.
-  }
-
-  oneof key {
-    ECKeyData ec_key = 1;
-  }
-}
-
-message DocumentDefinition {
-  message PublicKey {
-    string id = 1;
-
-    PublicKeyJwk jwk = 2;
-
-    enum Purpose {
-      AUTHENTICATION = 0;
-      KEY_AGREEMENT = 1;
-      ASSERTION_METHOD = 2;
-      CAPABILITY_INVOCATION = 3;
-    }
-    repeated Purpose purposes = 3;
-  }
-
-  message Service {
-    string id = 1;
-    enum Type {
-      MEDIATOR_SERVICE = 0;
-    }
-    Type type = 2;
-    string service_endpoint = 3;
-  }
-
-  repeated PublicKey public_keys = 1;
-  repeated Service services = 2;
-}
-
-message CreateDid {
-  bytes initial_update_commitment = 1;
-  bytes initial_recovery_commitment = 2;
-  string ledger = 3;
-  DocumentDefinition document = 4;
-}
-
-message UpdateDid {
-  message Patch {
-    oneof patch {
-      DocumentDefinition.PublicKey add_public_key = 1;
-      string remove_public_key = 2;
-      DocumentDefinition.Service add_service = 3;
-      string remove_service = 4;
-    }
-  }
-
-  string did = 1;
-  string ledger = 2;
-  bytes revealed_update_key = 3;
-  bytes previous_version = 4;
-  bytes forward_update_commitment = 5;
-  repeated Patch patches = 6;
-  bytes signature = 7;
-}
-
-message RecoverDid {
-  string did = 1;
-  string ledger = 2;
-  bytes revealed_recovery_key = 3;
-  bytes previous_version = 4;
-  bytes forward_update_commitment = 5;
-  bytes forward_recovery_commitment = 6;
-  repeated DocumentDefinition document = 7;
-  bytes signature = 8;
-}
-
-message DeactivateDid {
-  string did = 1;
-  string ledger = 2;
-  bytes revealed_recovery_key = 3;
-  bytes previous_version = 4;
-  bytes signature = 5;
-}
diff --git a/iris/api/grpc/protocol/dlt.proto b/iris/api/grpc/protocol/dlt.proto
deleted file mode 100644
index a366a7b540..0000000000
--- a/iris/api/grpc/protocol/dlt.proto
+++ /dev/null
@@ -1,37 +0,0 @@
-syntax = "proto3";
-
-import "scalapb/scalapb.proto";
-import "protocol/vc_operations.proto";
-import "protocol/did_operations.proto";
-
-option (scalapb.options) = {
-  no_default_values_in_constructor: true
-  package_name: "io.iohk.atala.iris.proto"
-};
-
-// The possible operations affecting the blockchain.
-message IrisOperation {
-  // The actual operation.
-  oneof operation {
-    // Used to create a public DID.
-    CreateDid create_did = 1;
-
-    // Used to update an existing public DID.
-    UpdateDid update_did = 2;
-
-    // Used to recover an existing public DID.
-    RecoverDid recover_did = 3;
-
-    // Used to deactivate a DID.
-    DeactivateDid deactivate_did = 4;
-
-    IssueCredentialsBatch issue_credentials_batch = 5;
-
-    RevokeCredentials revoke_credentials = 6;
-  };
-}
-
-// List of operations which will be stored in the blockchain transaction metadata
-message IrisBatch {
-  repeated IrisOperation operations = 1;
-}
diff --git a/iris/api/grpc/protocol/vc_operations.proto b/iris/api/grpc/protocol/vc_operations.proto
deleted file mode 100644
index 6bf3efc658..0000000000
--- a/iris/api/grpc/protocol/vc_operations.proto
+++ /dev/null
@@ -1,19 +0,0 @@
-syntax = "proto3";
-
-import "scalapb/scalapb.proto";
-
-option (scalapb.options) = {
-  no_default_values_in_constructor: true
-  package_name: "io.iohk.atala.iris.proto"
-};
-
-message IssueCredentialsBatch {
-  string issuer_did = 1;
-  bytes merkle_root = 2;
-}
-
-message RevokeCredentials {
-  string revoker_did = 1;
-  bytes issuance_batch_digest = 2;
-  repeated bytes credentials_to_revoke = 3;
-}
diff --git a/iris/api/grpc/service.proto b/iris/api/grpc/service.proto
deleted file mode 100644
index b51a0937ba..0000000000
--- a/iris/api/grpc/service.proto
+++ /dev/null
@@ -1,59 +0,0 @@
-syntax = "proto3";
-
-import "scalapb/scalapb.proto";
-import "google/protobuf/timestamp.proto";
-
-import "protocol/did_operations.proto";
-import "protocol/vc_operations.proto";
-import "protocol/dlt.proto";
-
-option (scalapb.options) = {
-  no_default_values_in_constructor: true
-  package_name: "io.iohk.atala.iris.proto"
-};
-
-message IrisOperationId {
-  bytes id = 1;
-}
-
-message IrisOperationOutcome {
-  bytes operation_id = 1;
-}
-
-enum IrisOperationStatus {
-  PENDING = 0;
-  SUBMITTED = 1;
-  ROLLED_BACK = 2;
-  CONFIRMED = 3;
-};
-
-message IrisOperationInfo {
-  bytes operationId = 1;
-  oneof operation {
-    CreateDid create_did = 2;
-    UpdateDid update_did = 3;
-    RecoverDid recovery_did = 4;
-    DeactivateDid deactivate_did = 5;
-    IssueCredentialsBatch issue_credentials_batch = 6;
-    RevokeCredentials revoke_credentials = 7;
-  }
-}
-
-message IrisBatchRequest {
-  // Hex representation of the transaction id.
-  // There is a one-to-one correspondence between a transaction and the batch in it.
-  string last_seen_transaction_id = 1;
-}
-
-message ConfirmedIrisBatch {
-  int32 block_level = 1;
-  google.protobuf.Timestamp blockTimestamp = 2;
-  string transactionId = 3;
-  IrisBatch batch = 4;
-}
-
-service IrisService {
-  rpc ScheduleOperation(IrisOperation) returns (IrisOperationOutcome) {}
-  rpc GetOperation(IrisOperationId) returns (IrisOperationInfo) {}
-  rpc GetIrisBatchStream(IrisBatchRequest) returns (stream ConfirmedIrisBatch) {}
-}
diff --git a/iris/client/scala-client/CHANGELOG.md b/iris/client/scala-client/CHANGELOG.md
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/iris/client/scala-client/README.md b/iris/client/scala-client/README.md
deleted file mode 100644
index 102c5cad5f..0000000000
--- a/iris/client/scala-client/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-## sbt project compiled with Scala 3
-
-### Usage
-
-This is a normal sbt project. You can compile code with `sbt compile`, run it with `sbt run`, and `sbt console` will start a Scala 3 REPL.
-
-For more information on the sbt-dotty plugin, see the
-[scala3-example-project](https://github.com/scala/scala3-example-project/blob/main/README.md).
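For context on what this patch deletes: the `iris-client` sbt module (removed from `build.sbt` above) compiled these `.proto` files with ScalaPB. A minimal sketch of how the generated bindings were used — message, package, and field names are taken from the proto definitions above, while all values are purely illustrative:

```scala
import com.google.protobuf.ByteString
import io.iohk.atala.iris.proto.dlt as dlt_proto
import io.iohk.atala.iris.proto.vc_operations as vc_proto

object IrisBatchSketch {
  // Wrap a single IssueCredentialsBatch operation into an IrisBatch,
  // the unit that Iris stored in DLT transaction metadata.
  val op: dlt_proto.IrisOperation = dlt_proto.IrisOperation(
    operation = dlt_proto.IrisOperation.Operation.IssueCredentialsBatch(
      vc_proto.IssueCredentialsBatch(
        issuerDid = "did:prism:example-issuer", // illustrative value
        merkleRoot = ByteString.copyFrom(Array[Byte](1, 2, 3))
      )
    )
  )

  val batch: dlt_proto.IrisBatch = dlt_proto.IrisBatch(operations = Seq(op))

  // ScalaPB generates binary (de)serialisers for every message; byte
  // arrays like this are what ended up in transaction metadata.
  val bytes: Array[Byte] = batch.toByteArray
  val roundTripped: dlt_proto.IrisBatch = dlt_proto.IrisBatch.parseFrom(bytes)
}
```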
diff --git a/iris/service/CHANGELOG.md b/iris/service/CHANGELOG.md
deleted file mode 100644
index 68a0abd94e..0000000000
--- a/iris/service/CHANGELOG.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# [iris-service-v0.2.0](https://github.com/input-output-hk/atala-prism-building-blocks/compare/iris-service-v0.1.0...iris-service-v0.2.0) (2022-11-30)
-
-
-### Features
-
-* **pollux:** implement Issue Credential v2 Protocol ([#144](https://github.com/input-output-hk/atala-prism-building-blocks/issues/144)) ([a80702f](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a80702f5b255d8079085a6ec27c87baa6a23ac59)), closes [#92](https://github.com/input-output-hk/atala-prism-building-blocks/issues/92)
-
-# [iris-service-v0.1.0](https://github.com/input-output-hk/atala-prism-building-blocks/compare/iris-service-v0.0.1...iris-service-v0.1.0) (2022-11-11)
-
-
-### Bug Fixes
-
-* **iris:** align type signature ([#72](https://github.com/input-output-hk/atala-prism-building-blocks/issues/72)) ([a19a781](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a19a7814c3fc1e1cc89a861ae3942bf4a5fbad0a))
-
-
-### Features
-
-* **iris:** ATL-1791 Implement blockchain syncer functionality ([#49](https://github.com/input-output-hk/atala-prism-building-blocks/issues/49)) ([431b657](https://github.com/input-output-hk/atala-prism-building-blocks/commit/431b6575b8df2f4744285b1c5e2dd56072fa874c))
-* **shared:** Add environment configuration for Iris DB and bump scala version in other components to enable build ([#96](https://github.com/input-output-hk/atala-prism-building-blocks/issues/96)) ([a5b583f](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a5b583f445b7efd31987cf9ca017bc544a877986))
-
-# iris-service-v0.1.0 (2022-11-09)
-
-### Bug Fixes
-
- * iris: align type signature (#72 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/72)) (a19a781 (https://github.com/input-output-hk/atala-prism-building-blocks/commit/a19a7814c3fc1e1cc89a861ae3942bf4a5fbad0a))
-
-### Features
-
- * iris: ATL-1791 Implement blockchain syncer functionality (#49 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/49)) (431b657 (https://github.com/input-output-hk/atala-prism-building-blocks/commit/431b6575b8df2f4744285b1c5e2dd56072fa874c))
 * shared: Add environment configuration for Iris DB and bump scala version in other components to enable build (#96 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/96)) (a5b583f (https://github.com/input-output-hk/atala-prism-building-blocks/commit/a5b583f445b7efd31987cf9ca017bc544a877986))
diff --git a/iris/service/README.md b/iris/service/README.md
deleted file mode 100644
index a22e626962..0000000000
--- a/iris/service/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Iris BB service
-
-## Quickstart
-
-__Running Iris service locally for development__
-
-```bash
-docker-compose -f docker/docker-compose-local.yaml up -d
-sbt api-server/run
-```
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala
deleted file mode 100644
index 2aa7507400..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package io.iohk.atala.iris.core.model
-
-import io.iohk.atala.iris.proto.dlt as proto
-import io.iohk.atala.iris.core.model.ledger.TransactionId
-
-import java.time.Instant
-
-case class ConfirmedBlock(
-    blockLevel: Int,
-    blockTimestamp: Instant,
-    transactions: Seq[(TransactionId, proto.IrisBatch)]
-)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala
deleted file mode 100644
index ccdb4d8ee7..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package io.iohk.atala.iris.core.model
-
-import io.iohk.atala.iris.core.model.ledger.TransactionId
-import io.iohk.atala.iris.proto.dlt as proto
-
-import java.time.Instant
-
-case class ConfirmedIrisBatch(
-    blockLevel: Int,
-    blockTimestamp: Instant,
-    transactionSeqId: Int,
-    transactionId: TransactionId,
-    batch: proto.IrisBatch
-)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala
deleted file mode 100644
index 7fc22339cd..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package io.iohk.atala.iris.core.model
-
-// TODO: replace with actual implementation
-final case class IrisNotification(foo: String)
-
-final case class IrisOperationId(id: String)
-final case class IrisOperation(foo: String)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala
deleted file mode 100644
index accb1400f5..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-sealed trait Block extends Product with Serializable {
-  def header: BlockHeader
-}
-
-object Block {
-  final case class Canonical(override val header: BlockHeader) extends Block
-
-  final case class Full(
-      override val header: BlockHeader,
-      transactions: List[Transaction]
-  ) extends Block {
-    def toCanonical: Canonical = Canonical(header)
-  }
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala
deleted file mode 100644
index 9b5df63a9f..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-sealed trait BlockError extends Product with Serializable
-
-object BlockError {
-
-  case class NotFound(blockNo: Int) extends BlockError
-
-  case object NoneAvailable extends BlockError
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala
deleted file mode 100644
index 85a4374840..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import com.typesafe.config.ConfigMemorySize
-import io.iohk.atala.shared.{HashValue, HashValueConfig, HashValueFrom}
-
-import scala.collection.compat.immutable.ArraySeq
-
-class BlockHash private (val value: ArraySeq[Byte]) extends AnyVal with HashValue {}
-
-object BlockHash extends HashValueFrom[BlockHash] {
-
-  override val config: HashValueConfig = HashValueConfig(
-    ConfigMemorySize.ofBytes(32)
-  )
-
-  override protected def constructor(value: ArraySeq[Byte]): BlockHash =
-    new BlockHash(value)
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala
deleted file mode 100644
index f20b1f4d98..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import java.time.Instant
-
-case class BlockHeader(
-    hash: BlockHash,
-    blockNo: Int,
-    time: Instant,
-    previousBlockHash: Option[BlockHash]
-)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala
deleted file mode 100644
index 197ba8fd6f..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-case class Funds(lovelaces: Int)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala
deleted file mode 100644
index 1533fb532a..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import enumeratum.{Enum, EnumEntry}
-
-import scala.collection.immutable.ArraySeq
-
-case class Ledger(name: String)
-
-object Ledger {
-  val InMemory: Ledger = Ledger("in-memory")
-  val Mainnet: Ledger = Ledger("mainnet")
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala
deleted file mode 100644
index c9c45334dd..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-case class Transaction(
-    id: TransactionId,
-    blockHash: BlockHash,
-    blockIndex: Int,
-    metadata: Option[TransactionMetadata]
-)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala
deleted file mode 100644
index a979b38dc3..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-case class TransactionDetails(id: TransactionId, status: TransactionStatus)
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala
deleted file mode 100644
index 3662437bc3..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import com.typesafe.config.ConfigMemorySize
-import io.iohk.atala.shared.{HashValue, HashValueConfig, HashValueFrom}
-
-import scala.collection.immutable.ArraySeq
-
-class TransactionId private (bytes: ArraySeq[Byte]) extends HashValue {
-  override def value: ArraySeq[Byte] = bytes
-}
-
-object TransactionId extends HashValueFrom[TransactionId] {
-
-  override val config: HashValueConfig = HashValueConfig(
-    ConfigMemorySize.ofBytes(32)
-  )
-
-  override protected def constructor(value: ArraySeq[Byte]): TransactionId =
-    new TransactionId(value)
-}
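The next deleted file, `TransactionMetadata.scala`, is where `IrisBatch` bytes were packed into (and parsed out of) transaction metadata. As orientation for the code below, here is a hedged sketch of the JSON shape its "in-memory" encoder produced — the metadata index 21325 and the `v`/`l`/`c` keys come from the constants in the file, while the hex content chunk is made up:

```scala
import io.circe.Json

object MetadataShapeSketch {
  // Illustrative only: version 2, in-memory ledger, one hex-encoded
  // chunk of IrisBatch bytes under PRISM's metadata index 21325.
  val exampleInmemoryMetadata: Json = Json.obj(
    "21325" -> Json.obj(
      "v" -> Json.fromInt(2),              // VERSION_KEY
      "l" -> Json.fromString("in-memory"), // LEDGER_KEY
      "c" -> Json.arr(Json.fromString("0a070a051a030a0102")) // CONTENT_KEY chunks
    )
  )
}
```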
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala
deleted file mode 100644
index 30611d7f23..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import io.circe.Json
-import io.circe.{ACursor, Json}
-import io.iohk.atala.iris.proto.dlt as proto
-import io.iohk.atala.shared.utils.BytesOps
-
-import scala.util.Try
-
-case class TransactionMetadata(json: Json)
-
-object TransactionMetadata {
-  // Last 16 bits of 344977920845, which is the decimal representation of the concatenation of the hexadecimal values
-  // (50 52 49 53 4d) of the word PRISM in ASCII.
-  val METADATA_PRISM_INDEX = 21325
-
-  private val VERSION_KEY = "v"
-  private val CONTENT_KEY = "c"
-  private val LEDGER_KEY = "l"
-  // Prefix to denote that the following characters represent a string of bytes in hexadecimal format
-  // (needed by Cardano Wallet)
-  private val BYTE_STRING_PREFIX = "0x"
-  // Maximum number of bytes that can be represented by a byte string (enforced by Cardano Node)
-  private val BYTE_STRING_LIMIT = 64
-
-  private val MAP_KEY = "k"
-  private val MAP_VALUE = "v"
-  private val MAP_TYPE = "map"
-  private val LIST_TYPE = "list"
-  private val INT_TYPE = "int"
-  private val STRING_TYPE = "string"
-  private val BYTES_TYPE = "bytes"
-
-  // TODO add ledger here
-  def fromTransactionMetadata(
-      expectedLedger: Ledger,
-      metadata: TransactionMetadata
-  ): Option[proto.IrisBatch] = {
-    val prismMetadata = metadata.json.hcursor
-      .downField(METADATA_PRISM_INDEX.toString)
-
-    for {
-      _ <- prismMetadata
-        .downField(VERSION_KEY)
-        .focus
-        .flatMap(_.asNumber)
-        .flatMap(_.toInt)
-        .find(_ == 2)
-
-      _ <- prismMetadata
-        .downField(LEDGER_KEY)
-        .focus
-        .flatMap(_.asString)
-        .find(_ == expectedLedger.name)
-
-      result <- fromTransactionMetadataV2(prismMetadata)
-    } yield result
-  }
-
-  private def fromTransactionMetadataV2(
-      prismMetadata: ACursor
-  ): Option[proto.IrisBatch] = {
-    val bytes = prismMetadata
-      .downField(CONTENT_KEY)
-      .focus
-      .flatMap(_.asArray)
-      .getOrElse(Vector[Json]())
-      .flatMap(parseByteString)
-      .toArray
-    if (bytes.isEmpty) {
-      // Either the content does not exist, is not the right type, or is truly empty
-      None
-    } else {
-      proto.IrisBatch.validate(bytes).toOption
-    }
-  }
-
-  private def parseByteString(byteString: Json): Array[Byte] = {
-    byteString.asString
-      .map(_.stripPrefix(BYTE_STRING_PREFIX))
-      .map(hex => Try(BytesOps.hexToBytes(hex)).getOrElse(Array[Byte]()))
-      .getOrElse(Array())
-  }
-
-  def toCardanoTransactionMetadata(
-      ledger: Ledger,
-      irisBatch: proto.IrisBatch
-  ): TransactionMetadata = {
-    // This definition aligns with the rules described here https://developers.cardano.org/docs/transaction-metadata/
-    // After posting that data to the Cardano blockchain, it gets transformed to JSON
-    TransactionMetadata(
-      Json.obj(
-        METADATA_PRISM_INDEX.toString -> Json.obj(
-          MAP_TYPE -> Json.arr(
-            Json.obj(
-              MAP_KEY -> Json.obj(STRING_TYPE -> Json.fromString(VERSION_KEY)),
-              MAP_VALUE -> Json.obj(INT_TYPE -> Json.fromInt(2))
-            ),
-            Json.obj(
-              MAP_KEY -> Json.obj(STRING_TYPE -> Json.fromString(LEDGER_KEY)),
-              MAP_VALUE -> Json.obj(STRING_TYPE -> Json.fromString(ledger.name))
-            ),
-            Json.obj(
-              MAP_KEY -> Json.obj(STRING_TYPE -> Json.fromString(CONTENT_KEY)),
-              MAP_VALUE -> Json.obj(
-                LIST_TYPE -> Json.arr(
-                  irisBatch.toByteArray
-                    .grouped(BYTE_STRING_LIMIT)
-                    .map(bytes =>
-                      Json.obj(
-                        BYTES_TYPE -> Json.fromString(
-                          BytesOps.bytesToHex(bytes)
-                        )
-                      )
-                    )
-                    .toSeq: _*
-                )
-              )
-            )
-          )
-        )
-      )
-    )
-  }
-
-  def toInmemoryTransactionMetadata(
-      ledger: Ledger,
-      irisBatch: proto.IrisBatch
-  ): TransactionMetadata =
-    TransactionMetadata(
-      Json.obj(
-        METADATA_PRISM_INDEX.toString -> Json.obj(
-          VERSION_KEY -> Json.fromInt(2),
-          LEDGER_KEY -> Json.fromString(ledger.name),
-          CONTENT_KEY -> Json.arr(
-            irisBatch.toByteArray
-              .grouped(BYTE_STRING_LIMIT)
-              .map(bytes => Json.fromString(BytesOps.bytesToHex(bytes)))
-              .toSeq: _*
-          )
-        )
-      )
-    )
-
-  def estimateTxMetadataSize(ledger: Ledger, irisBatch: proto.IrisBatch): Int = {
-    toCardanoTransactionMetadata(ledger, irisBatch).json.noSpaces.length
-  }
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala
deleted file mode 100644
index 9cc38a9225..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package io.iohk.atala.iris.core.model.ledger
-
-import enumeratum.{Enum, EnumEntry}
-import enumeratum.EnumEntry.Snakecase
-import scala.collection.immutable.ArraySeq
-
-sealed trait TransactionStatus extends EnumEntry with Snakecase
-
-object TransactionStatus extends Enum[TransactionStatus] {
-  val values = ArraySeq(InMempool, Submitted, Expired, InLedger)
-
-  case object InMempool extends TransactionStatus
-  case object Submitted extends TransactionStatus
-  case object Expired extends TransactionStatus
-  case object InLedger extends TransactionStatus
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala
deleted file mode 100644
index 156e36ca15..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package io.iohk.atala.iris.core.repository
-
-import io.iohk.atala.iris.core.model.ledger.BlockError
-import io.iohk.atala.iris.core.model.ledger.Block
-
-trait ROBlocksRepository[F[_]] {
-  def getFullBlock(blockNo: Int): F[Either[BlockError.NotFound, Block.Full]]
-  def getLatestBlock: F[Either[BlockError.NoneAvailable.type, Block.Canonical]]
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala
deleted file mode 100644
index 3df7e9e1e8..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package io.iohk.atala.iris.core.repository
-
-import zio.*
-
-/** This component is intended to run several combined repository operations in one database transaction. The idea is
-  * to have the repository traits instantiated with both the ConnectionIO and ZIO monads: the former makes it possible
-  * to combine several operations in one DB transaction, the latter to run repository operations without additional
-  * hassle.
-  */
-trait DbRepositoryTransactor[F[_]] {
-  def runAtomically[A](action: F[A]): Task[A]
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala
deleted file mode 100644
index bf4796bffa..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package io.iohk.atala.iris.core.repository
-
-import io.iohk.atala.iris.core.model.ConfirmedIrisBatch
-import io.iohk.atala.iris.core.model.ledger.TransactionId
-import io.iohk.atala.iris.proto.dlt as proto
-
-import java.time.Instant
-
-trait ROIrisBatchesRepository[S[_]] {
-
-  // Streams batches which are already in the database.
-  // Every transaction contains an IrisBatch in its metadata, hence
-  // there is a one-to-one correspondence between TransactionId and IrisBatch.
-  def getIrisBatchesStream(lastSeen: Option[TransactionId]): S[ConfirmedIrisBatch]
-}
-
-/** @tparam F
-  *   represents a monad where CRUD requests are executed
-  * @tparam S
-  *   represents a monad for streaming of data
-  */
-trait IrisBatchesRepository[F[_], S[_]] extends ROIrisBatchesRepository[S] {
-  def saveIrisBatch(irisBatch: ConfirmedIrisBatch): F[Unit]
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala
deleted file mode 100644
index 97c43b7b64..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package io.iohk.atala.iris.core.repository
-
-trait ROKeyValueRepository[F[_]] {
-  def get(key: String): F[Option[String]]
-  def getInt(key: String): F[Option[Int]]
-}
-
-trait KeyValueRepository[F[_]] extends ROKeyValueRepository[F] {
-  def set(key: String, value: Option[Int | String]): F[Unit]
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala
deleted file mode 100644
index 5775e6954e..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package io.iohk.atala.iris.core.repository
-
-import io.iohk.atala.iris.core.model as model
-import zio.*
-
-// TODO: replace with actual implementation
-trait OperationsRepository[F[_]] {
-  def getOperation(id: model.IrisOperationId): F[model.IrisOperation]
-  def saveOperations(ops: Seq[model.IrisOperation]): F[Unit]
-}
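The `DbRepositoryTransactor` trait deleted above was implemented for doobie by `JdbcDbRepositoryTransactorIO` (also deleted, further down in this patch). A minimal sketch of that pattern — not the deleted file's exact contents — assuming a doobie `Transactor[Task]` and zio-interop-cats are in scope:

```scala
import doobie.ConnectionIO
import doobie.implicits.*
import doobie.util.transactor.Transactor
import io.iohk.atala.iris.core.repository.DbRepositoryTransactor
import zio.*
import zio.interop.catz.*

// Repositories instantiated with F = ConnectionIO can have several of
// their operations composed and then committed as one DB transaction.
final class DoobieDbRepositoryTransactor(xa: Transactor[Task]) extends DbRepositoryTransactor[ConnectionIO] {
  override def runAtomically[A](action: ConnectionIO[A]): Task[A] =
    action.transact(xa)
}
```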
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala
deleted file mode 100644
index 1e6e7036d0..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package io.iohk.atala.iris.core.service
-
-import cats.Monad
-import cats.syntax.applicative.*
-import cats.syntax.flatMap.*
-import cats.syntax.functor.*
-import cats.syntax.traverse.*
-import io.iohk.atala.iris.core.model.{ConfirmedBlock, ConfirmedIrisBatch}
-import io.iohk.atala.iris.core.repository.{DbRepositoryTransactor, IrisBatchesRepository, KeyValueRepository}
-import zio.*
-import zio.stream.*
-
-trait BlocksSaveSinker {
-  val sink: ZSink[Any, Throwable, ConfirmedBlock, Nothing, Unit]
-}
-
-object BlocksSaveSinker {
-  def layer[F[_]: TagK: Monad, S[_]: TagK]: URLayer[
-    KeyValueRepository[F] & IrisBatchesRepository[F, S] & DbRepositoryTransactor[F],
-    BlocksSaveSinker
-  ] =
-    ZLayer.fromFunction((x: KeyValueRepository[F], y: IrisBatchesRepository[F, S], z: DbRepositoryTransactor[F]) =>
-      BlocksSaveSinkerImpl(x, y, z)
-    )
-
-}
-
-/** @param keyValueRepo
-  * @param batchesRepo
-  * @param transactor
-  * @tparam F
-  *   \- a monad which supports combining operations that might be performed within one database transaction, like
-  *   doobie.ConnectionIO
-  * @tparam S
-  *   \- the type representing the streaming effect
-  */
-class BlocksSaveSinkerImpl[F[_]: Monad, S[_]](
-    keyValueRepo: KeyValueRepository[F],
-    batchesRepo: IrisBatchesRepository[F, S],
-    transactor: DbRepositoryTransactor[F]
-) extends BlocksSaveSinker {
-
-  private val LAST_SYNCED_BLOCK_NO = "last_synced_block_no"
-  private val LAST_SYNCED_BLOCK_TIMESTAMP = "last_synced_block_timestamp"
-
-  override val sink: ZSink[Any, Throwable, ConfirmedBlock, Nothing, Unit] =
-    ZSink.foreach[Any, Throwable, ConfirmedBlock](updateLastSyncedBlock)
-
-  private def updateLastSyncedBlock(block: ConfirmedBlock): Task[Unit] = {
-    val timestampEpochMilli = block.blockTimestamp.toEpochMilli
-    transactor.runAtomically {
-      for {
-        _ <- keyValueRepo.set(LAST_SYNCED_BLOCK_NO, Some(block.blockLevel))
-        _ <- keyValueRepo.set(LAST_SYNCED_BLOCK_TIMESTAMP, Some(timestampEpochMilli.toString))
-        _ <- block.transactions.zipWithIndex.traverse { case ((txId, batch), i) =>
-          batchesRepo.saveIrisBatch(ConfirmedIrisBatch(block.blockLevel, block.blockTimestamp, i, txId, batch))
-        }
-      } yield ()
-    }
-  }
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala
deleted file mode 100644
index fb06ff66db..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala
+++ /dev/null
@@ -1,155 +0,0 @@
-package io.iohk.atala.iris.core.service
-
-import io.iohk.atala.iris.core.model.ConfirmedBlock
-import io.iohk.atala.iris.core.model.ledger.{Block, Ledger, TransactionId, TransactionMetadata}
-import io.iohk.atala.iris.core.repository.{ROBlocksRepository, ROKeyValueRepository}
-import io.iohk.atala.iris.proto.dlt as proto
-import zio.*
-import zio.stream.*
-
-trait BlocksStreamer {
-  val blocksStream: UStream[ConfirmedBlock]
-}
-
-object BlocksStreamer {
-  case class Config(targetLedger: Ledger, genesisBlockNumber: Int, blockConfirmationsToWait: Int, blockEvery: Duration)
-
-  def layer(
-      config: Config
-  ): URLayer[ROBlocksRepository[Task] & ROKeyValueRepository[Task], BlocksStreamer] =
-    ZLayer.fromFunction(BlocksStreamerImpl(_, _, config))
-}
-
-/** The goal of this streaming service is to emit batches of operations which are confirmed in the blockchain. It is
-  * stateful and relies on a read-only blocks database and a read-only key-value database.
-  * @param blocksRep
-  *   \- read-only storage of blocks from the blockchain
-  * @param keyValueRep
-  *   \- read-only key-value storage
-  * @param config
-  *   \- protocol-specific constants
-  */
-class BlocksStreamerImpl(
-    val blocksRep: ROBlocksRepository[Task],
-    val keyValueRep: ROKeyValueRepository[Task],
-    val config: BlocksStreamer.Config
-) extends BlocksStreamer {
-  private val LAST_SYNCED_BLOCK_NO = "last_synced_block_no"
-  private val MAX_SYNC_BLOCKS = 100
-
-  private sealed trait BlocksSyncOutcome
-
-  private object BlocksSyncOutcome {
-    case object MoreBlocksToSyncExist extends BlocksSyncOutcome
-    case object NoMoreBlocks extends BlocksSyncOutcome
-  }
-
-  private type ConfirmedBlockCallback = ZStream.Emit[Any, Nothing, ConfirmedBlock, Unit]
-
-  override val blocksStream: UStream[ConfirmedBlock] = ZStream.asyncZIO[Any, Nothing, ConfirmedBlock] { cb =>
-    startSyncing().provideLayer(ZLayer.succeed(cb)).fork
-  }
-
-  private def startSyncing(): RIO[ConfirmedBlockCallback, Unit] = {
-    for {
-      outcome <- syncMissingBlocks()
-      _ <-
-        if (outcome == BlocksSyncOutcome.NoMoreBlocks) {
-          ZIO.sleep(config.blockEvery).flatMap(_ => startSyncing())
-        } else startSyncing()
-    } yield ()
-  }
-
-  /** Syncs up on blocks from the blockchain and returns whether there are remaining blocks to sync. */
-  private def syncMissingBlocks(): RIO[ConfirmedBlockCallback, BlocksSyncOutcome] = {
-    for {
-      // Gets the number of the latest block processed by the service.
-      maybeLastSyncedBlockNo <- keyValueRep.getInt(LAST_SYNCED_BLOCK_NO)
-      // Calculates the next block based on the initial `blockNumberSyncStart` and the latest synced block.
-      lastSyncedBlockNo = calculateLastSyncedBlockNo(
-        maybeLastSyncedBlockNo,
-        config.genesisBlockNumber
-      )
-      // Gets the latest block from the blocks database.
-      latestBlock <- blocksRep.getLatestBlock
-      // Calculates the latest confirmed block based on the number of required confirmations.
-      lastConfirmedBlockNo = latestBlock.map(
-        _.header.blockNo - config.blockConfirmationsToWait
-      )
-      syncStart = lastSyncedBlockNo + 1
-      // Syncs no more than `MAX_SYNC_BLOCKS` during one `syncMissingBlocks` iteration.
-      syncEnd = lastConfirmedBlockNo.map(
-        math.min(_, lastSyncedBlockNo + MAX_SYNC_BLOCKS)
-      )
-      // Syncs all blocks with numbers from `syncStart` to `syncEnd`.
-      _ <- syncEnd.fold(_ => ZIO.unit, end => syncBlocksInRange(syncStart to end))
-    } yield lastConfirmedBlockNo
-      .flatMap(last =>
-        syncEnd.map(end => if (last > end) BlocksSyncOutcome.MoreBlocksToSyncExist else BlocksSyncOutcome.NoMoreBlocks)
-      )
-      .getOrElse(BlocksSyncOutcome.NoMoreBlocks)
-  }
-
-  // Syncs blocks in the given range.
-  private def syncBlocksInRange(blockNos: Range): RIO[ConfirmedBlockCallback, Unit] = {
-    if (blockNos.isEmpty) ZIO.unit
-    else {
-      // Sequentially syncs blocks from the given range one by one.
-      ZIO.foreachDiscard(blockNos)(blockNo => syncBlock(blockNo))
-    }
-  }
-
-  // Syncs block `blockNo` with the internal state.
-  private def syncBlock(blockNo: Int): RIO[ConfirmedBlockCallback, Unit] = {
-    for {
-      // Retrieves the block header and the list of transactions in the block.
-      block <- blocksRep.getFullBlock(blockNo)
-      // Maybe in the future we will add a block handler here.
-      // Looks over the transactions in the block.
-      _ <- block.fold(_ => ZIO.unit, filterNPushBlock)
-    } yield ()
-  }
-
-  /** Filters the transactions in the `block` and pushes the block to the stream. */
-  private def filterNPushBlock(block: Block.Full): RIO[ConfirmedBlockCallback, Unit] = {
-    val transactions: List[(TransactionId, proto.IrisBatch)] = for {
-      // Iterate over transactions in the block.
-      transaction <- block.transactions
-      // Retrieve metadata from the transaction if it exists.
-      metadata <- transaction.metadata
-      // Parse metadata in accordance with the PRISM protocol if it's possible.
-      irisBatch <- TransactionMetadata.fromTransactionMetadata(config.targetLedger, metadata)
-      // Keep only the operations that target the ledger this instance works on.
-      ops = irisBatch.operations.filter(op =>
-        op.operation.createDid.forall(_.ledger == config.targetLedger.name) &&
-          op.operation.updateDid.forall(_.ledger == config.targetLedger.name) &&
-          op.operation.recoverDid.forall(_.ledger == config.targetLedger.name) &&
-          op.operation.deactivateDid.forall(_.ledger == config.targetLedger.name)
-      )
-      nonEmptyBatch <-
-        if (ops.nonEmpty) { Some(proto.IrisBatch(ops)) }
-        else None
-    } yield (transaction.id, nonEmptyBatch)
-
-    val confirmedBlock =
-      ConfirmedBlock(
-        blockLevel = block.header.blockNo,
-        blockTimestamp = block.header.time,
-        transactions = transactions
-      )
-
-    for {
-      cb <- ZIO.service[ConfirmedBlockCallback]
-      // Triggers the callback attached to the ZStream on every block.
-      _ <- ZIO.succeed(cb(ZIO.succeed(Chunk(confirmedBlock))))
-    } yield ()
-  }
-
-  private def calculateLastSyncedBlockNo(
-      maybeLastSyncedBlockNo: Option[Int],
-      blockNumberSyncStart: Int
-  ): Int =
-    math.max(maybeLastSyncedBlockNo.getOrElse(0), blockNumberSyncStart - 1)
-
-}
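Taken together, the sinker and streamer deleted above formed the sync pipeline: the streamer emits `ConfirmedBlock`s, the sinker persists them atomically. A sketch of how they compose — the real wiring lived in the also-deleted server `Modules.scala`, and the config values here are invented for illustration:

```scala
import io.iohk.atala.iris.core.model.ledger.Ledger
import io.iohk.atala.iris.core.service.{BlocksSaveSinker, BlocksStreamer}
import zio.*

object SyncWiringSketch {
  // Drain the stream of confirmed blocks into the save sink.
  val syncPipeline: RIO[BlocksStreamer & BlocksSaveSinker, Unit] =
    for {
      streamer <- ZIO.service[BlocksStreamer]
      sinker   <- ZIO.service[BlocksSaveSinker]
      _        <- streamer.blocksStream.run(sinker.sink)
    } yield ()

  // Illustrative config: a block counts as confirmed after 2 successors,
  // and the repository is polled for new blocks every 10 seconds.
  val streamerLayer = BlocksStreamer.layer(
    BlocksStreamer.Config(
      targetLedger = Ledger.InMemory,
      genesisBlockNumber = 0,
      blockConfirmationsToWait = 2,
      blockEvery = 10.seconds
    )
  )
}
```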
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala
deleted file mode 100644
index 050ccdffb2..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-package io.iohk.atala.iris.core.service
-
-import io.iohk.atala.iris.core.model.ledger.TransactionStatus.{InLedger, InMempool}
-import io.iohk.atala.iris.core.model.ledger.*
-import io.iohk.atala.iris.core.repository.ROBlocksRepository
-import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService.{CardanoBlock, CardanoTransaction, Config}
-import io.iohk.atala.iris.proto.dlt as proto
-import io.iohk.atala.prism.crypto.Sha256
-import io.circe.{Json, parser}
-import zio.*
-import zio.stm.*
-
-import java.time.Instant
-import java.util.concurrent.TimeUnit
-
-object InmemoryUnderlyingLedgerService {
-  case class Config(blockEvery: Duration, initialFunds: Funds, txFee: Funds, ledger: Ledger)
-
-  case class CardanoTransaction(operations: Seq[proto.IrisOperation]) {
-    lazy val transactionId: TransactionId = {
-      val objectBytes = proto.IrisBatch(operations).toByteArray
-      val hash = Sha256.compute(objectBytes)
-      TransactionId
-        .from(hash.getValue)
-        .getOrElse(throw new RuntimeException("Unexpected invalid hash"))
-    }
-  }
-
-  case class CardanoBlock(header: BlockHeader, txs: Seq[CardanoTransaction]) {
-    def toBlockFull(ledger: Ledger): Block.Full = {
-      Block.Full(
-        header,
-        txs.toList.map(tx =>
-          Transaction(
-            id = tx.transactionId,
-            blockHash = header.hash,
-            blockIndex = header.blockNo,
-            metadata = Some(TransactionMetadata.toInmemoryTransactionMetadata(ledger,
-              proto.IrisBatch(tx.operations)))
-          )
-        )
-      )
-    }
-  }
-
-  object CardanoBlock {
-    def evalBlockHash(txs: Seq[CardanoTransaction], prevHash: Option[BlockHash]): BlockHash = {
-      val bytes = prevHash.fold(Array.empty[Byte])(bh => bh.value.toArray)
-      val hash = Sha256.compute(
-        Array.concat(txs.map(_.transactionId.value.toArray).appended(bytes): _*)
-      )
-      BlockHash.from(hash.getValue).getOrElse(throw new RuntimeException("Unexpected invalid hash"))
-    }
-  }
-
-  def layer(config: Config): ULayer[InmemoryUnderlyingLedgerService] = ZLayer.fromZIO {
-    for {
-      mempoolRef <- TRef.make(Vector[CardanoTransaction]()).commit
-      blocksRef <- TRef.make(Vector[CardanoBlock]()).commit
-      initialBalance <- TRef.make(config.initialFunds).commit
-      srv = InmemoryUnderlyingLedgerService(config, mempoolRef, blocksRef, initialBalance)
-      _ <- srv.startBackgroundProcess()
-    } yield srv
-  }
-}
-
-class InmemoryUnderlyingLedgerService(
-    config: Config,
-    mempoolRef: TRef[Vector[CardanoTransaction]],
-    blocksRef: TRef[Vector[CardanoBlock]],
-    balanceRef: TRef[Funds]
-) extends UnderlyingLedgerService
-    with ROBlocksRepository[Task] {
-
-  override def publish(operations: Seq[proto.IrisOperation]): IO[LedgerError, Unit] =
-    STM.atomically {
-      for {
-        curFunds <- balanceRef.get
-        newFunds <- STM.cond(
-          curFunds.lovelaces >= config.txFee.lovelaces,
-          Funds(curFunds.lovelaces - config.txFee.lovelaces),
-          LedgerError("Insufficient wallet balance")
-        )
-        _ <- balanceRef.set(newFunds)
-        _ <- mempoolRef.update(_.appended(CardanoTransaction(operations)))
-      } yield ()
-    }
-
-  override def getTransactionDetails(transactionId: TransactionId): IO[LedgerError, TransactionDetails] =
-    STM.atomically {
-      for {
-        mempool <- mempoolRef.get
-        blockchain <- blocksRef.get
-        tdetails <- STM
-          .fromOption {
-            mempool
-              .find(_.transactionId == transactionId)
-              .map(_ => TransactionDetails(transactionId, InMempool))
-          }
-          .orElse {
-            STM.fromOption {
-              blockchain
-                .find(block => block.txs.exists(t => t.transactionId == transactionId))
-                .map(_ => TransactionDetails(transactionId, InLedger))
-            }
-          }
-          .orElseFail(LedgerError(s"Couldn't find tx $transactionId"))
-      } yield tdetails
-    }
-
-  override def deleteTransaction(transactionId: TransactionId): IO[LedgerError, Unit] = STM.atomically {
-    for {
-      mempool <- mempoolRef.get
-      _ <- STM.cond(
-        mempool.exists(_.transactionId == transactionId),
-        (),
-        LedgerError(s"Transaction $transactionId not found in the mempool")
-      )
-      _ <- mempoolRef.update(m => m.filter(_.transactionId != transactionId))
-      _ <- balanceRef.update(b => Funds(b.lovelaces + config.txFee.lovelaces))
-    } yield ()
-  }
-
-  override def getWalletBalance: IO[LedgerError, Funds] = balanceRef.get.commit
-
-  def getMempool: UIO[List[CardanoTransaction]] = mempoolRef.get.commit.map(_.toList)
-
-  def getBlocks: UIO[List[CardanoBlock]] = blocksRef.get.commit.map(_.toList)
-
-  private[service] def startBackgroundProcess(): UIO[Unit] = (for {
-    curTime <- Clock.currentTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli)
-    _ <- STM
-      .atomically {
-        for {
-          // Craft a new block from mempool transactions
-          txs <- mempoolRef.modify(old => (old, Vector.empty))
-          prevHash <- blocksRef.get.map(_.lastOption.map(_.header.hash))
-          blockIdx <- blocksRef.get.map(_.size)
-          blockHash = CardanoBlock.evalBlockHash(txs, prevHash)
-          blockHeader = BlockHeader(blockHash, blockIdx, curTime, prevHash)
-          _ <- blocksRef.update(_.appended(CardanoBlock(blockHeader, txs)))
-        } yield ()
-      }
-  } yield ())
-    .repeat(Schedule.spaced(config.blockEvery))
-    .fork
-    .map(_ => ())
-
-  override def getFullBlock(blockNo: Int): Task[Either[BlockError.NotFound, Block.Full]] = STM.atomically {
-    for {
-      blocks <- blocksRef.get
-      res =
-        if (blockNo < blocks.size) {
-          Right(blocks.drop(blockNo).head.toBlockFull(config.ledger))
-        } else {
-          Left(BlockError.NotFound(blockNo))
-        }
-    } yield res
-  }
-
-  override def getLatestBlock: Task[Either[BlockError.NoneAvailable.type, Block.Canonical]] = for {
-    blocks <- blocksRef.get.commit
-    res <-
-      if (blocks.isEmpty) { ZIO.succeed(Left(BlockError.NoneAvailable)) }
-      else ZIO.succeed(Right(Block.Canonical(blocks.last.header)))
-  } yield res
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala
deleted file mode 100644
index 4f63fd8782..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package io.iohk.atala.iris.core.service
-
-import io.iohk.atala.iris.proto.dlt as proto
-import zio.*
-
-// TODO: replace with actual implementation
-trait PublishingService {
-  def publishOperation(op: proto.IrisOperation): UIO[Unit]
-}
-
-object MockPublishingService {
-  val layer: ULayer[PublishingService] = ZLayer.succeed {
-    new PublishingService {
-      override def publishOperation(op: proto.IrisOperation): UIO[Unit] = ZIO.unit
-    }
-  }
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala
deleted file mode 100644
index ff23525b09..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package io.iohk.atala.iris.core.service
-
-import io.iohk.atala.iris.proto.dlt as proto
-import io.iohk.atala.iris.core.model.IrisOperation
-import io.iohk.atala.iris.core.model.ledger.{Funds, TransactionDetails, TransactionId}
-import zio.{IO, UIO}
-
-case class LedgerError(msg: String) extends RuntimeException(msg)
-
-trait UnderlyingLedgerService {
-//  def getType: Ledger
-
-  def publish(operations: Seq[proto.IrisOperation]): IO[LedgerError, Unit]
-
-  def getTransactionDetails(transactionId: TransactionId): IO[LedgerError, TransactionDetails]
-
-  def deleteTransaction(transactionId: TransactionId): IO[LedgerError, Unit]
-
-  def getWalletBalance: IO[LedgerError, Funds]
-}
diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala
deleted file mode 100644
index 9db35963ef..0000000000
--- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package io.iohk.atala.iris.core.worker
-
-import io.iohk.atala.iris.proto.dlt as proto
-import zio.{UIO, ULayer, ZIO, ZLayer}
-
-trait PublishingScheduler {
-  def scheduleOperations(op: proto.IrisOperation): UIO[Unit]
-}
-
-object MockPublishingScheduler {
-  val layer: ULayer[PublishingScheduler] = ZLayer.succeed {
-    new PublishingScheduler {
-      def scheduleOperations(op: proto.IrisOperation): UIO[Unit] = ZIO.unit
-    }
-  }
-}
diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala
deleted file mode 100644
index
afa68afa0f..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.repository.DbRepositoryTransactor -import zio.* - -object DummyDbRepositoryTransactor { - val layer: ULayer[DbRepositoryTransactor[Task]] = ZLayer.succeed(DummyDbRepositoryTransactor()) -} - -class DummyDbRepositoryTransactor extends DbRepositoryTransactor[Task] { - override def runAtomically[A](action: Task[A]): Task[A] = action -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala deleted file mode 100644 index ef790ffce5..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala +++ /dev/null @@ -1,28 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.model.ConfirmedIrisBatch -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.IrisBatchesRepository -import zio.* -import zio.stream.* - -type StreamZIO[A] = Stream[Throwable, A] - -object InMemoryIrisBatchesRepository { - val layer: ULayer[InMemoryIrisBatchesRepository] = ZLayer.fromZIO { - for { - ref <- Ref.make(Vector[ConfirmedIrisBatch]()) - srv = InMemoryIrisBatchesRepository(ref) - } yield srv - } -} - -class InMemoryIrisBatchesRepository(list: Ref[Vector[ConfirmedIrisBatch]]) - extends IrisBatchesRepository[Task, StreamZIO] { - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): Task[Unit] = list.update(_.appended(irisBatch)) - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamZIO[ConfirmedIrisBatch] = - ZStream.fromIterableZIO(list.get) - - def getConfirmedBatches: Task[Vector[ConfirmedIrisBatch]] = list.get -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala deleted file mode 100644 index 935924b817..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala +++ /dev/null @@ -1,21 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.repository.KeyValueRepository -import zio.* - -object InMemoryKeyValueRepository { - val layer: ULayer[KeyValueRepository[Task]] = ZLayer.fromZIO { - for { - ref <- Ref.make(Map[String, Any]()) - srv = InMemoryKeyValueRepository(ref) - } yield srv - } -} - -class InMemoryKeyValueRepository(kv: Ref[Map[String, Any]]) extends KeyValueRepository[Task] { - override def get(key: String): Task[Option[String]] = kv.get.map(_.get(key).map(_.asInstanceOf[String])) - - override def getInt(key: String): Task[Option[Int]] = kv.get.map(_.get(key).map(_.asInstanceOf[Int])) - - override def set(key: String, value: Option[Int | String]): Task[Unit] = kv.update(_.updatedWith(key)(_ => value)) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala deleted file mode 100644 index a33d80c917..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala +++ /dev/null @@ -1,118 +0,0 @@ -package io.iohk.atala.iris.core.service - -import 
io.iohk.atala.iris.core.mock.{ - InMemoryIrisBatchesRepository, - InMemoryKeyValueRepository, - DummyDbRepositoryTransactor -} -import io.iohk.atala.iris.core.model.ledger.{Funds, Ledger, TransactionId} -import io.iohk.atala.iris.core.testutils.PublishThenAdjust -import io.iohk.atala.iris.core.testutils.PublishThenAdjust.* -import io.iohk.atala.iris.core.testutils.RandomUtils.* -import zio.* -import zio.stream.* -import zio.test.* -import zio.interop.catz.* - -object BlockchainSyncSpec extends ZIOSpecDefault { - val blockEvery = 10.seconds - val inmemoryDefaultConfig = InmemoryUnderlyingLedgerService.Config(blockEvery, Funds(1000), Funds(1), Ledger.Mainnet) - val inmemoryLedgerLayer = InmemoryUnderlyingLedgerService.layer(inmemoryDefaultConfig) - - val keyValueRepoLayer = InMemoryKeyValueRepository.layer - - val blockchainStreamerConfig1BlockConfirm = BlocksStreamer.Config(Ledger.Mainnet, 0, 1, blockEvery) - val blockchainStreamer1Layer: TaskLayer[BlocksStreamer] = - (inmemoryLedgerLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(blockchainStreamerConfig1BlockConfirm) - - val blockchainStreamerConfig3BlocksConfirm: BlocksStreamer.Config = - BlocksStreamer.Config(Ledger.Mainnet, 0, 3, blockEvery) - val blockchainStreamer3Layer: TaskLayer[BlocksStreamer] = - (inmemoryLedgerLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(blockchainStreamerConfig3BlocksConfirm) - - type StreamZIO[A] = Stream[Throwable, A] - val irisBatchesRepoLayer = InMemoryIrisBatchesRepository.layer - val blockchainSaver: TaskLayer[BlocksSaveSinker] = - (keyValueRepoLayer ++ irisBatchesRepoLayer ++ DummyDbRepositoryTransactor.layer) >>> BlocksSaveSinker - .layer[Task, StreamZIO] - - override def spec = suite("BlockchainSyncSpec")( - test("Sync up 1 block with 4 transactions") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 1.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - irisBatches <- irisBatchesRepo.getConfirmedBatches - expected = Vector( - "c4556a3d133b0a184a01baa9f3ea76a8fef2a06e66dec0907038997b2d7588de", - "8a89c3c1bbc39b5e5eb0db0ed8b12d876ec89f45a7dfeaaa7c24e39ed974aab1", - "0872cced55cab747ae0a3d463e5713e5cb9225617af04d7243b21d9d82751986", - "29798b9678930bc07c097adffaf3e13ae044af64d2b950af0414c231e3a06b8a" - ).map(TransactionId.from(_).get) - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer(inmemoryLedgerLayer ++ blockchainStreamer1Layer ++ blockchainSaver ++ irisBatchesRepoLayer) - }, - test("Sync up 1 block with 2 transaction") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0), op(1)) >> 1.seconds, - Seq(op(2), op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - irisBatches <- irisBatchesRepo.getConfirmedBatches - expected = Vector( - 
"1c6fd29ae378a773b2e3957be37ead077aa98d7041a3d4bb6533eb0a95e4058c", - "919e893246ffd0543d9005d9fdea6e2b26b85b150eb953fc4ba368097546d347" - ).map(TransactionId.from(_).get) - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer( - inmemoryLedgerLayer ++ blockchainStreamer1Layer ++ blockchainSaver ++ irisBatchesRepoLayer - ) - }, - test("Block confirmation is 3") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(6)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1), op(2)) >> blockEvery, - Seq(op(3)) >> blockEvery, - Seq(op(4)) >> blockEvery, - Seq(op(5)) >> blockEvery, - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - expected = Vector( - "c4556a3d133b0a184a01baa9f3ea76a8fef2a06e66dec0907038997b2d7588de", - "de69eda103be2676872937a0622cb9d831939d595fb9f80fba5da36cfe28d174" - ).map(TransactionId.from(_).get) - irisBatches <- irisBatchesRepo.getConfirmedBatches - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer( - inmemoryLedgerLayer ++ blockchainStreamer3Layer ++ blockchainSaver ++ irisBatchesRepoLayer - ) - }, - ) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala deleted file mode 100644 index fb2054301e..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala +++ /dev/null @@ -1,179 +0,0 @@ -package io.iohk.atala.iris.core.service - -import com.google.protobuf.ByteString -import io.iohk.atala.iris.core.model.ledger.TransactionStatus.{InLedger, InMempool} -import io.iohk.atala.iris.core.model.ledger.{Funds, Ledger, TransactionDetails} -import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService.{CardanoBlock, CardanoTransaction} -import io.iohk.atala.iris.core.testutils.RandomUtils.* -import io.iohk.atala.iris.core.testutils.PublishThenAdjust.* -import io.iohk.atala.iris.core.testutils.PublishThenAdjust -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition, UpdateDid} -import io.iohk.atala.iris.proto.dlt as proto -import zio.* -import zio.test.* -import zio.test.TestAspect.ignore -import zio.test.Assertion.* - -object InmemoryUnderlyingLedgerServiceSpec extends ZIOSpecDefault { - val defaultConfig = InmemoryUnderlyingLedgerService.Config(10.seconds, Funds(1000), Funds(1), Ledger.Mainnet) - val inmemoryLedger = InmemoryUnderlyingLedgerService.layer(defaultConfig) - - def spec = suite("InmemoryUnderlyingLedgerServiceSpec")( - suite("Background worker")( - test("All the operations in the one block within 4 different transactions") { - val testCase = - for { - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 1.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - mempool <- srvc.getMempool - blocks <- srvc.getBlocks - } yield assertTrue(mempool == List.empty) && - assertTrue( - blocks.map(_.txs) == List( - List(), - List( - 
CardanoTransaction(Seq(op(0))), - CardanoTransaction(Seq(op(1))), - CardanoTransaction(Seq(op(2))), - CardanoTransaction(Seq(op(3))) - ), - List() - ) - ) - testCase.provideLayer(inmemoryLedger) - } @@ TestAspect.ignore, - test("Operations distributed between 2 blocks") { - val testCase = - for { - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 10.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - mempool <- srvc.getMempool - blocks <- srvc.getBlocks - } yield assertTrue(mempool == List.empty) && - assertTrue( - blocks.map(_.txs) == List( - List(), - List( - CardanoTransaction(Seq(op(0))), - CardanoTransaction(Seq(op(1))), - ), - List( - CardanoTransaction(Seq(op(2))), - CardanoTransaction(Seq(op(3))), - ) - ) - ) - testCase.provideLayer(inmemoryLedger) - } @@ TestAspect.ignore - ), - suite("getTransactionDetails")( - test("Find unconfirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - txDetails <- srvc.getTransactionDetails(targetTx.transactionId) - } yield assertTrue(txDetails == TransactionDetails(targetTx.transactionId, InMempool)) - testCase.provideLayer(inmemoryLedger) - }, - test("Find confirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 11.seconds, - Seq(op(1)) >> 11.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 12.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - txDetails <- srvc.getTransactionDetails(targetTx.transactionId) - } yield assertTrue(txDetails == TransactionDetails(targetTx.transactionId, InLedger)) - testCase.provideLayer(inmemoryLedger) - }, - test("Find unknown transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 11.seconds, - Seq(op(1)) >> 11.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 12.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(1), op(2))) - testResult <- assertZIO(srvc.getTransactionDetails(targetTx.transactionId).exit) { - fails(equalTo(LedgerError(s"Couldn't find tx ${targetTx.transactionId}"))) - } - } yield testResult - testCase.provideLayer(inmemoryLedger) - } - ), - suite("deleteTransaction")( - test("Delete transaction from mempool") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - _ <- srvc.deleteTransaction(targetTx.transactionId) - mempool <- srvc.getMempool - } yield assertTrue(mempool == List(CardanoTransaction(Seq(op(4))))) - 
testCase.provideLayer(inmemoryLedger) - }, - test("Delete confirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(1))) - testResult <- - assertZIO(srvc.deleteTransaction(targetTx.transactionId).exit) { - fails(equalTo(LedgerError(s"Transaction ${targetTx.transactionId} not found in the mempool"))) - } - } yield testResult - testCase.provideLayer(inmemoryLedger) - } - ) - ) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala deleted file mode 100644 index fdf185ee1d..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala +++ /dev/null @@ -1,19 +0,0 @@ -package io.iohk.atala.iris.core.testutils - -import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService -import zio.* -import zio.test.* -import io.iohk.atala.iris.proto.dlt as proto - -case class PublishThenAdjust(operations: Seq[proto.IrisOperation], adjust: Duration) - -object PublishThenAdjust { - implicit class Then(operations: Seq[proto.IrisOperation]) { - def >>(adj: Duration): PublishThenAdjust = PublishThenAdjust(operations, adj) - } - - def foreachZIO[R](srv: InmemoryUnderlyingLedgerService)(xs: Iterable[PublishThenAdjust]): ZIO[R, Any, Unit] = - ZIO.foreachDiscard[R, Any, PublishThenAdjust](xs) { case PublishThenAdjust(ops, adj) => - srv.publish(ops).flatMap(_ => TestClock.adjust(adj)) - } -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala deleted file mode 100644 index 9f5ddddaf8..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala +++ /dev/null @@ -1,56 +0,0 @@ -package io.iohk.atala.iris.core.testutils - -import com.google.protobuf.ByteString -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition, UpdateDid} -import io.iohk.atala.iris.proto.dlt as proto -import zio.{Random, UIO} - -object RandomUtils { - - private def nextBytes(length: Int): UIO[ByteString] = - Random.nextBytes(length).map(x => ByteString.copyFrom(x.toArray)) - - def genCreateOperation(): UIO[proto.IrisOperation] = - for { - updComm <- nextBytes(20) - recComm <- nextBytes(20) - } yield proto.IrisOperation( - proto.IrisOperation.Operation.CreateDid( - CreateDid( - initialUpdateCommitment = updComm, - initialRecoveryCommitment = recComm, - ledger = "mainnet", - document = Some(DocumentDefinition(publicKeys = Seq(), services = Seq())) - ) - ) - ) - - def genUpdateOperation(): UIO[proto.IrisOperation] = - for { - didSuff <- Random.nextString(10) - updKey <- nextBytes(20) - prevVers <- nextBytes(20) - forwUpdComm <- nextBytes(20) - sig <- nextBytes(20) - } yield proto.IrisOperation( - proto.IrisOperation.Operation.UpdateDid( - UpdateDid( - did = "did:prism:" + didSuff, - revealedUpdateKey = updKey, - previousVersion = prevVers, - forwardUpdateCommitment = forwUpdComm, - patches = Seq(), - ledger = "mainnet", - signature = sig - ) - ) - ) - - def genOperation(): UIO[proto.IrisOperation] = - for { - op <- 
Random.nextBoolean - res <- - if (op) genCreateOperation() - else genUpdateOperation() - } yield res -} diff --git a/iris/service/docker/docker-compose-local.yaml b/iris/service/docker/docker-compose-local.yaml deleted file mode 100644 index 2ef4e9f0c6..0000000000 --- a/iris/service/docker/docker-compose-local.yaml +++ /dev/null @@ -1,34 +0,0 @@ -version: "3.9" - -services: - db: - image: postgres:13 - restart: always - environment: - POSTGRES_DB: iris - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - ports: - - 5432:5432 - volumes: - - pg_data_iris_db:/var/lib/postgresql/data - - # delay to ensure DB is up before applying migrations - db_init_delay: - image: alpine:3 - command: sleep 5 - depends_on: - db: - condition: service_started - - db_init: - image: flyway/flyway:9.3.0-alpine - volumes: - - $PWD/migrations/sql:/flyway/sql - command: -url=jdbc:postgresql://db:5432/iris?user=postgres&password=postgres migrate - depends_on: - db_init_delay: - condition: service_completed_successfully - -volumes: - pg_data_iris_db: diff --git a/iris/service/migrations/sql/V1__init_tables.sql b/iris/service/migrations/sql/V1__init_tables.sql deleted file mode 100644 index 487fbbd7f7..0000000000 --- a/iris/service/migrations/sql/V1__init_tables.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE public.iris_operations( - "foo" VARCHAR(100) NOT NULL -); diff --git a/iris/service/server/src/main/resources/application.conf b/iris/service/server/src/main/resources/application.conf deleted file mode 100644 index 0ee97184d8..0000000000 --- a/iris/service/server/src/main/resources/application.conf +++ /dev/null @@ -1,14 +0,0 @@ -iris { - database { - host = "localhost" - host = ${?IRIS_DB_HOST} - port = 5432 - port = ${?IRIS_DB_PORT} - databaseName = "iris" - databaseName = ${?IRIS_DB_NAME} - username = "postgres" - username = ${?IRIS_DB_USER} - password = "postgres" - password = ${?IRIS_DB_PASSWORD} - } -} \ No newline at end of file diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala deleted file mode 100644 index 024d19f7f9..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala +++ /dev/null @@ -1,8 +0,0 @@ -package io.iohk.atala.iris.server - -import zio.* -import zio.stream.* - -object Main extends ZIOAppDefault { - override def run = Modules.app -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala deleted file mode 100644 index 8fbc64cea2..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala +++ /dev/null @@ -1,109 +0,0 @@ -package io.iohk.atala.iris.server - -import cats.effect.std.Dispatcher -import doobie.util.transactor.Transactor -import io.iohk.atala.iris.core.repository.* -import io.iohk.atala.iris.core.service.* -import io.iohk.atala.iris.core.worker.{MockPublishingScheduler, PublishingScheduler} -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import io.iohk.atala.iris.server.grpc.service.IrisServiceGrpcImpl -import io.iohk.atala.iris.server.grpc.{GrpcServer, GrpcServices} -import io.iohk.atala.iris.sql.repository -import io.iohk.atala.iris.sql.repository.* -import zio.* -import zio.interop.catz.* -import zio.stream.ZStream -import com.typesafe.config.ConfigFactory -import io.iohk.atala.iris.server.config.AppConfig -import zio.config.typesafe.TypesafeConfigSource -import zio.config.{ReadError, 
read} - -object Modules { - val app: Task[Unit] = { - val grpcServerApp = GrpcServices.services.flatMap(GrpcServer.start(8081, _)) - - grpcServerApp - .provideLayer(GrpcModule.layers) - .unit - } - -} - -// TODO: replace with actual implementation -object AppModule { - val publishingServiceLayer: ULayer[PublishingService] = MockPublishingService.layer - val publishingSchedulerLayer: ULayer[PublishingScheduler] = MockPublishingScheduler.layer - - val configLayer: Layer[ReadError[String], AppConfig] = ZLayer.fromZIO { - read( - AppConfig.descriptor.from( - TypesafeConfigSource.fromTypesafeConfig( - ZIO.attempt(ConfigFactory.load()) - ) - ) - ) - } -} - -object GrpcModule { - val irisServiceGrpcLayer: TaskLayer[IrisServiceGrpc.IrisService] = { - val schedulerLayer = AppModule.publishingSchedulerLayer - val irisBatchesLayer = RepoModule.irisBatchesRepoLayer - (schedulerLayer ++ irisBatchesLayer) >>> IrisServiceGrpcImpl.layer - } - - val layers = irisServiceGrpcLayer -} - -object BlockchainModule { - def blocksStreamerLayer(config: BlocksStreamer.Config): TaskLayer[BlocksStreamer] = { - val blocksRepoLayer = RepoModule.blocksRepoLayer - val keyValueRepoLayer = RepoModule.keyValueRepoLayer - (blocksRepoLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(config) - } - - val blocksSaverLayer: TaskLayer[BlocksSaveSinker] = { - val keyValueIO = JdbcKeyValueRepositoryIO.layer - val irisBatchesIO = JdbcIrisBatchRepositoryIO.layer - val dbRepositoryTransactorIO = RepoModule.dbRepositoryTransactor - (keyValueIO ++ irisBatchesIO ++ dbRepositoryTransactorIO) >>> BlocksSaveSinker - .layer[repository.IO, repository.StreamIO] - } -} - -object RepoModule { - val transactorLayer: TaskLayer[Transactor[Task]] = { - val layerWithConfig = ZLayer.fromZIO { - ZIO.service[AppConfig].map(_.iris.database).flatMap { config => - Dispatcher[Task].allocated.map { case (dispatcher, _) => - given Dispatcher[Task] = dispatcher - TransactorLayer.hikari[Task]( - TransactorLayer.DbConfig( - username = config.username, - password = config.password, - jdbcUrl = s"jdbc:postgresql://${config.host}:${config.port}/${config.databaseName}" - ) - ) - } - } - }.flatten - AppModule.configLayer >>> layerWithConfig - } - - val dbRepositoryTransactor: TaskLayer[JdbcDbRepositoryTransactorIO] = - transactorLayer >>> JdbcDbRepositoryTransactorIO.layer - - val operationsRepoLayer: TaskLayer[OperationsRepository[Task]] = - transactorLayer >>> JdbcOperationsRepository.layer - - val irisBatchesRepoLayer: TaskLayer[IrisBatchesRepository[Task, StreamZIO]] = - (transactorLayer ++ JdbcIrisBatchRepositoryIO.layer) >>> JdbcIrisBatchRepository.layer - - val blocksRepoLayer: TaskLayer[ROBlocksRepository[Task]] = - transactorLayer >>> JdbcBlocksRepository.layer - - val keyValueRepoLayer: TaskLayer[KeyValueRepository[Task]] = - (transactorLayer ++ JdbcKeyValueRepositoryIO.layer) >>> JdbcKeyValueRepository.layer - - val layers = operationsRepoLayer ++ irisBatchesRepoLayer ++ blocksRepoLayer ++ keyValueRepoLayer -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala deleted file mode 100644 index 555a221c97..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.server.config - -import zio.config.* -import zio.config.magnolia.Descriptor - -final case class AppConfig( - iris: IrisConfig -) - -object AppConfig { - val descriptor: 
ConfigDescriptor[AppConfig] = Descriptor[AppConfig] -} - -final case class IrisConfig(database: DatabaseConfig) - -final case class DatabaseConfig(host: String, port: Int, databaseName: String, username: String, password: String) diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala deleted file mode 100644 index b3e9203464..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala +++ /dev/null @@ -1,38 +0,0 @@ -package io.iohk.atala.iris.server.grpc - -import io.grpc.{ServerBuilder, ServerServiceDefinition} -import io.grpc.protobuf.services.ProtoReflectionService -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import zio.* - -object GrpcServer { - - def start(port: Int, services: Seq[ServerServiceDefinition]): Task[Unit] = { - val managedServer = ZIO.acquireRelease( - for { - _ <- ZIO.logInfo(s"starting grpc server on port $port") - server <- ZIO.attempt { - val builder = ServerBuilder.forPort(port) - services.foreach(s => builder.addService(s)) - builder.addService(ProtoReflectionService.newInstance()) - builder.build().start() - } - _ <- ZIO.logInfo(s"grpc server listening on port $port") - } yield server - )(server => - for { - _ <- ZIO.logInfo("stopping grpc server") - _ <- ZIO.attempt(server.shutdown()).orDie - _ <- ZIO.logInfo("grpc server stopped successfully") - } yield () - ) - - ZIO.scoped { - for { - _ <- managedServer - _ <- ZIO.never - } yield () - } - } - -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala deleted file mode 100644 index f6275acfd8..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala +++ /dev/null @@ -1,15 +0,0 @@ -package io.iohk.atala.iris.server.grpc - -import io.grpc.ServerServiceDefinition -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import zio.* - -object GrpcServices { - - def services: URIO[IrisServiceGrpc.IrisService, Seq[ServerServiceDefinition]] = - for { - ec <- ZIO.executor.map(_.asExecutionContext) - irisService <- ZIO.serviceWith[IrisServiceGrpc.IrisService](IrisServiceGrpc.bindService(_, ec)) - } yield Seq(irisService) - -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala deleted file mode 100644 index 72a494bd0f..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala +++ /dev/null @@ -1,101 +0,0 @@ -package io.iohk.atala.iris.server.grpc.service - -import com.google.protobuf.ByteString -import io.grpc.stub.StreamObserver -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.ROIrisBatchesRepository -import io.iohk.atala.iris.core.service.PublishingService -import io.iohk.atala.iris.core.worker.PublishingScheduler -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition} -import io.iohk.atala.iris.proto.{dlt as proto, service as proto_service} -import com.google.protobuf.timestamp as proto_google -import io.iohk.atala.iris.proto.service.* -import zio.* -import zio.stream.* - -import scala.concurrent.Future - -type Stream[A] = ZStream[Any, Throwable, A] - -class 
IrisServiceGrpcImpl(service: PublishingScheduler, batchRepo: ROIrisBatchesRepository[Stream])(using - runtime: Runtime[Any] -) extends IrisServiceGrpc.IrisService { - - private val mockOperationId = ByteString.copyFrom("aaafff111".getBytes()) - private val mockOperation = IrisOperationInfo.Operation.CreateDid( - CreateDid( - initialUpdateCommitment = ByteString.copyFrom("a".getBytes()), - initialRecoveryCommitment = ByteString.copyFrom("b".getBytes()), - ledger = "https://atalaprism.io", - document = Some(DocumentDefinition(publicKeys = Seq(), services = Seq())) - ) - ) - - override def scheduleOperation(request: proto.IrisOperation): Future[IrisOperationOutcome] = Unsafe.unsafe { - implicit unsafe => - runtime.unsafe.runToFuture(ZIO.succeed(IrisOperationOutcome(mockOperationId))) - } - - override def getOperation(request: IrisOperationId): Future[IrisOperationInfo] = Unsafe.unsafe { implicit unsafe => - runtime.unsafe.runToFuture( - ZIO.succeed( - IrisOperationInfo( - operationId = mockOperationId, - operation = mockOperation - ) - ) - ) - } - - override def getIrisBatchStream( - request: IrisBatchRequest, - responseObserver: StreamObserver[ConfirmedIrisBatch] - ): Unit = { - Unsafe.unsafe { implicit unsafe => - val txIdHex = request.lastSeenTransactionId - runtime.unsafe - .run { - for { - txId <- - if (txIdHex.isEmpty) ZIO.succeed(None) - else { ZIO.fromOption(TransactionId.from(txIdHex)).map(Some(_)) } - _ <- batchRepo - .getIrisBatchesStream(txId) - .foreach { b => - ZIO.succeedBlocking { - responseObserver.onNext( - proto_service - .ConfirmedIrisBatch( - blockLevel = b.blockLevel, - blockTimestamp = - Some(proto_google.Timestamp(b.blockTimestamp.getEpochSecond, b.blockTimestamp.getNano)), - transactionId = b.transactionId.toString, - batch = Some(b.batch) - ) - ) - } - } - .onError { cause => - cause.failureOption.fold(ZIO.unit) { e => - ZIO.succeedBlocking { - responseObserver.onError(e) - } - } - } - } yield () - } - .getOrThrowFiberFailure() - } - } -} - -object IrisServiceGrpcImpl { - val layer: URLayer[PublishingScheduler & ROIrisBatchesRepository[Stream], IrisServiceGrpc.IrisService] = - ZLayer.fromZIO { - for { - rt <- ZIO.runtime[Any] - svc <- ZIO.service[PublishingScheduler] - repo <- ZIO.service[ROIrisBatchesRepository[Stream]] - } yield IrisServiceGrpcImpl(svc, repo)(using rt) - } -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala deleted file mode 100644 index c49cd9c4c8..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala +++ /dev/null @@ -1,17 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.Transactor -import io.iohk.atala.iris.core.model.ledger.{Block, BlockError} -import zio.* -import io.iohk.atala.iris.core.repository.ROBlocksRepository - -class JdbcBlocksRepository(xa: Transactor[Task]) extends ROBlocksRepository[Task] { - override def getFullBlock(blockNo: RuntimeFlags): Task[Either[BlockError.NotFound, Block.Full]] = ??? - - override def getLatestBlock: Task[Either[BlockError.NoneAvailable.type, Block.Canonical]] = ??? 
-} - -object JdbcBlocksRepository { - val layer: URLayer[Transactor[Task], ROBlocksRepository[Task]] = - ZLayer.fromFunction(new JdbcBlocksRepository(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala deleted file mode 100644 index 7d635dba6a..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import doobie.implicits.* -import io.iohk.atala.iris.core.repository.DbRepositoryTransactor -import zio.* -import zio.interop.catz.* - -class JdbcDbRepositoryTransactorIO(xa: Transactor[Task]) extends DbRepositoryTransactor[ConnectionIO] { - override def runAtomically[A](action: ConnectionIO[A]): Task[A] = action.transact(xa) -} - -object JdbcDbRepositoryTransactorIO { - val layer: URLayer[Transactor[Task], JdbcDbRepositoryTransactorIO] = - ZLayer.fromFunction(new JdbcDbRepositoryTransactorIO(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala deleted file mode 100644 index de7a74ad6a..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala +++ /dev/null @@ -1,35 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import fs2.Stream -import io.iohk.atala.iris.core.model.ConfirmedIrisBatch -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.IrisBatchesRepository -import zio.* - -class JdbcIrisBatchRepositoryIO extends IrisBatchesRepository[ConnectionIO, StreamIO] { - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): ConnectionIO[Unit] = ??? - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamIO[ConfirmedIrisBatch] = ??? -} - -object JdbcIrisBatchRepositoryIO { - val layer: ULayer[IrisBatchesRepository[ConnectionIO, StreamIO]] = - ZLayer.succeed(new JdbcIrisBatchRepositoryIO) -} - -class JdbcIrisBatchRepository(xa: Transactor[Task], ioImpl: IrisBatchesRepository[ConnectionIO, StreamIO]) - extends IrisBatchesRepository[Task, StreamZIO] { - - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): Task[Unit] = ??? - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamZIO[ConfirmedIrisBatch] = ??? -} - -object JdbcIrisBatchRepository { - val layer: URLayer[Transactor[Task] & IrisBatchesRepository[ConnectionIO, StreamIO], IrisBatchesRepository[ - Task, - StreamZIO - ]] = - ZLayer.fromFunction(new JdbcIrisBatchRepository(_, _)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala deleted file mode 100644 index d6ccdc9bc6..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala +++ /dev/null @@ -1,33 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import io.iohk.atala.iris.core.repository.KeyValueRepository -import zio.* - -class JdbcKeyValueRepositoryIO extends KeyValueRepository[ConnectionIO] { - override def get(key: String): ConnectionIO[Option[String]] = ??? 
- - override def getInt(key: String): ConnectionIO[Option[Int]] = ??? - - override def set(key: String, value: Option[Int | String]): ConnectionIO[Unit] = ??? -} - -object JdbcKeyValueRepositoryIO { - val layer: ULayer[KeyValueRepository[ConnectionIO]] = - ZLayer.succeed(new JdbcKeyValueRepositoryIO) -} - -class JdbcKeyValueRepository(xa: Transactor[Task], ioImpl: KeyValueRepository[ConnectionIO]) - extends KeyValueRepository[Task] { - - override def get(key: String): Task[Option[String]] = ??? - - override def getInt(key: String): Task[Option[RuntimeFlags]] = ??? - - override def set(key: String, value: Option[Int | String]): Task[Unit] = ??? -} - -object JdbcKeyValueRepository { - val layer: URLayer[Transactor[Task] & KeyValueRepository[ConnectionIO], KeyValueRepository[Task]] = - ZLayer.fromFunction(new JdbcKeyValueRepository(_, _)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala deleted file mode 100644 index 58434c8cfd..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala +++ /dev/null @@ -1,30 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import doobie.implicits.* -import io.iohk.atala.iris.core.model -import io.iohk.atala.iris.core.repository.OperationsRepository -import io.iohk.atala.iris.sql.repository.JdbcOperationsRepository -import zio.* -import zio.interop.catz.* - -// TODO: replace with actual implementation -class JdbcOperationsRepository(xa: Transactor[Task]) extends OperationsRepository[Task] { - - override def getOperation(id: model.IrisOperationId): Task[model.IrisOperation] = { - val cxnIO = sql""" - |SELECT foo FROM public.iris_operations - |""".stripMargin.query[String].unique - - cxnIO - .transact(xa) - .map(model.IrisOperation.apply) - } - - override def saveOperations(ops: Seq[model.IrisOperation]): Task[Unit] = ZIO.unit -} - -object JdbcOperationsRepository { - val layer: URLayer[Transactor[Task], OperationsRepository[Task]] = - ZLayer.fromFunction(new JdbcOperationsRepository(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala deleted file mode 100644 index 5798536d07..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala +++ /dev/null @@ -1,63 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import cats.effect.{Async, Resource} -import doobie.util.transactor.Transactor -import com.zaxxer.hikari.HikariConfig -import doobie.util.ExecutionContexts -import doobie.hikari.HikariTransactor -import zio.interop.catz.* -import zio.* -import cats.effect.std.Dispatcher - -object TransactorLayer { - - case class DbConfig( - username: String, - password: String, - jdbcUrl: String, - awaitConnectionThreads: Int = 8 - ) - - def hikari[A[_]: Async: Dispatcher](config: DbConfig)(using tag: Tag[Transactor[A]]): TaskLayer[Transactor[A]] = { - val transactorLayerZio = ZIO - .attempt { - // https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing - val poolSize = (config.awaitConnectionThreads * 2) + 1 - val hikariConfig = makeHikariConfig(config) - hikariConfig.setPoolName("DBPool") - hikariConfig.setLeakDetectionThreshold(300000) // 5 mins - hikariConfig.setMinimumIdle(poolSize) - hikariConfig.setMaximumPoolSize(poolSize) // 
Both Pool size amd Minimum Idle should same and is recommended - hikariConfig - } - .map { hikariConfig => - val pool: Resource[A, Transactor[A]] = for { - // Resource yielding a transactor configured with a bounded connect EC and an unbounded - // transaction EC. Everything will be closed and shut down cleanly after use. - ec <- ExecutionContexts.fixedThreadPool[A](config.awaitConnectionThreads) // our connect EC - xa <- HikariTransactor.fromHikariConfig[A](hikariConfig, ec) - } yield xa - - pool.toManaged.toLayer[Transactor[A]] - } - - ZLayer.fromZIO(transactorLayerZio).flatten - } - - private def makeHikariConfig(config: DbConfig): HikariConfig = { - val hikariConfig = HikariConfig() - - hikariConfig.setJdbcUrl(config.jdbcUrl) - hikariConfig.setUsername(config.username) - hikariConfig.setPassword(config.password) - hikariConfig.setAutoCommit(false) - - hikariConfig.setDriverClassName("org.postgresql.Driver") - hikariConfig.addDataSourceProperty("cachePrepStmts", "true") - hikariConfig.addDataSourceProperty("prepStmtCacheSize", "250") - hikariConfig.addDataSourceProperty("prepStmtCacheSqlLimit", "2048") - - hikariConfig - } - -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala deleted file mode 100644 index d9e914bec9..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.sql - -import doobie.* -import fs2.Stream -import zio.stream as zstream - -package object repository { - type IO[A] = ConnectionIO[A] - type StreamIO[A] = Stream[ConnectionIO, A] - - type StreamZIO[A] = zstream.Stream[Throwable, A] -} diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala deleted file mode 100644 index 2866d4dfd1..0000000000 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.pollux.core.model - -import io.iohk.atala.pollux.vc.jwt.W3cCredentialPayload -import io.iohk.atala.prism.crypto.MerkleInclusionProof -import io.iohk.atala.iris.proto.service.IrisOperationId - -final case class PublishedBatchData( - operationId: IrisOperationId, - credentialsAnsProofs: Seq[(W3cCredentialPayload, MerkleInclusionProof)] -) diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala index f1c27236ce..140013be7c 100644 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala +++ b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala @@ -22,7 +22,6 @@ object CredentialServiceError { final case class CreateCredentialPayloadFromRecordError(cause: Throwable) extends CredentialServiceError final case class CredentialRequestValidationError(error: String) extends CredentialServiceError final case class CredentialIdNotDefined(credential: W3cCredentialPayload) extends CredentialServiceError - final case class IrisError(cause: Throwable) extends CredentialServiceError final case class CredentialSchemaError(cause: io.iohk.atala.pollux.core.model.error.CredentialSchemaError) extends CredentialServiceError final case class 
UnsupportedVCClaimsValue(error: String) extends CredentialServiceError diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala index 36b5fcec3a..c68cf44984 100644 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala +++ b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala @@ -5,7 +5,7 @@ import io.iohk.atala.castor.core.model.did.CanonicalPrismDID import io.iohk.atala.mercury.model.DidId import io.iohk.atala.mercury.protocol.issuecredential.{IssueCredential, OfferCredential, RequestCredential} import io.iohk.atala.pollux.core.model.error.CredentialServiceError -import io.iohk.atala.pollux.core.model.{DidCommID, IssueCredentialRecord, PublishedBatchData} +import io.iohk.atala.pollux.core.model.{DidCommID, IssueCredentialRecord} import io.iohk.atala.pollux.vc.jwt.{Issuer, W3cCredentialPayload} import io.iohk.atala.prism.crypto.MerkleInclusionProof import io.iohk.atala.shared.models.WalletAccessContext @@ -57,8 +57,6 @@ object MockCredentialService extends Mock[CredentialService] { object AcceptCredentialRequest extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] object GenerateJWTCredential extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] object GenerateAnonCredsCredential extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] - object PublishCredentialBatch - extends Effect[(Seq[W3cCredentialPayload], Issuer), CredentialServiceError, PublishedBatchData] object MarkCredentialRecordsAsPublishQueued extends Effect[Seq[(W3cCredentialPayload, MerkleInclusionProof)], CredentialServiceError, Int] object ReceiveCredentialIssue extends Effect[IssueCredential, CredentialServiceError, IssueCredentialRecord] diff --git a/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala b/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala index 6e92687262..10b986d98e 100644 --- a/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala +++ b/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala @@ -1,13 +1,11 @@ package io.iohk.atala.pollux.core.service import io.circe.Json -import io.grpc.ManagedChannelBuilder import io.iohk.atala.agent.walletapi.memory.GenericSecretStorageInMemory import io.iohk.atala.agent.walletapi.service.ManagedDIDService import io.iohk.atala.agent.walletapi.storage.GenericSecretStorage import io.iohk.atala.castor.core.model.did.PrismDID import io.iohk.atala.castor.core.service.DIDService -import io.iohk.atala.iris.proto.service.IrisServiceGrpc import io.iohk.atala.mercury.model.{AttachmentDescriptor, DidId} import io.iohk.atala.mercury.protocol.issuecredential.* import io.iohk.atala.pollux.core.model.* @@ -21,9 +19,6 @@ import java.util.UUID trait CredentialServiceSpecHelper { - protected val irisStubLayer = ZLayer.fromZIO( - ZIO.succeed(IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress("localhost", 9999).usePlaintext.build)) - ) protected val didResolverLayer = ZLayer.fromZIO(ZIO.succeed(makeResolver(Map.empty))) protected val defaultWalletLayer = ZLayer.succeed(WalletAccessContext(WalletId.default)) diff --git a/prism-agent/service/server/src/main/resources/application.conf 
b/prism-agent/service/server/src/main/resources/application.conf index 15bb0db1a2..f220464738 100644 --- a/prism-agent/service/server/src/main/resources/application.conf +++ b/prism-agent/service/server/src/main/resources/application.conf @@ -1,15 +1,6 @@ devMode = false devMode = ${?DEV_MODE} -iris { - service { - host = "localhost" - host = ${?IRIS_HOST} - port = 8081 - port = ${?IRIS_PORT} - } -} - prismNode { service = { host = "localhost" diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala index a8854e0a8b..38d4836efc 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala @@ -34,8 +34,6 @@ import io.iohk.atala.iam.authentication.apikey.AuthenticationRepository import io.iohk.atala.iam.authentication.oidc.KeycloakAuthenticatorImpl import io.iohk.atala.iam.authentication.oidc.KeycloakClientImpl import io.iohk.atala.iam.authentication.oidc.KeycloakConfig -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import io.iohk.atala.iris.proto.service.IrisServiceGrpc.IrisServiceStub import io.iohk.atala.pollux.vc.jwt.{PrismDidResolver, DidResolver as JwtDidResolver} import io.iohk.atala.prism.protos.node_api.NodeServiceGrpc import io.iohk.atala.shared.db.{ContextAwareTask, DbConfig, TransactorLayer} @@ -94,21 +92,6 @@ object AppModule { } object GrpcModule { - // TODO: once Castor + Pollux has migrated to use Node 2.0 stubs, this should be removed. - val irisStubLayer: TaskLayer[IrisServiceStub] = { - val stubLayer = ZLayer.fromZIO( - ZIO - .service[AppConfig] - .map(_.iris.service) - .flatMap(config => - ZIO.attempt( - IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress(config.host, config.port).usePlaintext.build) - ) - ) - ) - SystemModule.configLayer >>> stubLayer - } - val prismNodeStubLayer: TaskLayer[NodeServiceGrpc.NodeServiceStub] = { val stubLayer = ZLayer.fromZIO( ZIO diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala index 931caf43e8..b13225e9ea 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala @@ -13,7 +13,6 @@ import scala.util.Try final case class AppConfig( devMode: Boolean, - iris: IrisConfig, pollux: PolluxConfig, agent: AgentConfig, connect: ConnectConfig, @@ -31,8 +30,6 @@ object AppConfig { final case class VaultConfig(address: String, token: String) -final case class IrisConfig(service: GrpcServiceConfig) - final case class PolluxConfig( database: DatabaseConfig, issueBgJobRecordsLimit: Int, diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala index af6bcd083e..6fec924fc2 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala @@ -60,8 +60,6 @@ object IssueController { ErrorResponse.badRequest(title = "Create Request Validation Error", detail = Some(msg)) case CredentialServiceError.CredentialIdNotDefined(msg) => 
ErrorResponse.badRequest(title = "Credential ID not defined one request", detail = Some(msg.toString)) - case CredentialServiceError.IrisError(msg) => - ErrorResponse.internalServerError(title = "VDR Error", detail = Some(msg.toString)) case CredentialServiceError.CredentialSchemaError(e) => ErrorResponse.badRequest(title = "Credential Schema Error", detail = Some(e.message)) case CredentialServiceError.UnsupportedVCClaimsValue(error) => diff --git a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala index d60e8a6178..880e697c89 100644 --- a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala +++ b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala @@ -116,12 +116,6 @@ object IssueControllerSpec extends ZIOSpecDefault { ) assert(httpError)(equalTo(errorResponse)) }, - test("return internal server error if iris error") { - val cse = CredentialServiceError.IrisError(new Throwable("message")) - val httpError = IssueController.toHttpError(cse) - val errorResponse = ErrorResponse.internalServerError(title = "VDR Error", detail = Some(cse.cause.toString)) - assert(httpError)(equalTo(errorResponse)) - } ) } diff --git a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala index 5883b36e7e..3ffba16d91 100644 --- a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala +++ b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala @@ -1,7 +1,6 @@ package io.iohk.atala.issue.controller import com.typesafe.config.ConfigFactory -import io.grpc.ManagedChannelBuilder import io.iohk.atala.agent.server.config.AppConfig import io.iohk.atala.agent.walletapi.memory.GenericSecretStorageInMemory import io.iohk.atala.agent.walletapi.model.BaseEntity @@ -12,7 +11,6 @@ import io.iohk.atala.connect.core.repository.ConnectionRepositoryInMemory import io.iohk.atala.connect.core.service.ConnectionServiceImpl import io.iohk.atala.iam.authentication.AuthenticatorWithAuthZ import io.iohk.atala.iam.authentication.DefaultEntityAuthenticator -import io.iohk.atala.iris.proto.service.IrisServiceGrpc import io.iohk.atala.issue.controller.http.{ CreateIssueCredentialRecordRequest, IssueCredentialRecord, @@ -51,10 +49,6 @@ trait IssueControllerTestTools extends PostgresTestContainerSupport { Response[ Either[DeserializationException[String], IssueCredentialRecordPage] ] - - val irisStubLayer = ZLayer.fromZIO( - ZIO.succeed(IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress("localhost", 9999).usePlaintext.build)) - ) val didResolverLayer = ZLayer.fromZIO(ZIO.succeed(makeResolver(Map.empty))) val configLayer: Layer[ReadError[String], AppConfig] = ZLayer.fromZIO { @@ -84,7 +78,6 @@ trait IssueControllerTestTools extends PostgresTestContainerSupport { private val controllerLayer = contextAwareTransactorLayer >+> configLayer >+> - irisStubLayer >+> didResolverLayer >+> ResourceURIDereferencerImpl.layer >+> CredentialRepositoryInMemory.layer >+> From 5318657a50923ef7367bda690a98c0f8c2ce6ae6 Mon Sep 17 00:00:00 2001 From: Anton Baliasnikov Date: Fri, 27 Oct 2023 14:19:17 +0100 Subject: [PATCH 2/2] chore: update .env file with each release 
(#771) Signed-off-by: Anton Baliasnikov --- package.json | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index e2ae4edcf3..69524e2af8 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,12 @@ "prepareCmd": "npm version ${nextRelease.version} --git-tag-version false" } ], + [ + "@semantic-release/exec", + { + "prepareCmd": "sed -i.bak \"s/PRISM_AGENT_VERSION=.*/PRISM_AGENT_VERSION=${nextRelease.version}/\" ./infrastructure/local/.env && rm -f ./infrastructure/local/.env.bak" + } + ], [ "@semantic-release/exec", { @@ -104,9 +110,10 @@ "prism-agent/service/api/http/prism-agent-openapi-spec.yaml", "infrastructure/charts/agent/Chart.yaml", "infrastructure/charts/index.yaml", - "infrastructure/charts/*.tgz" + "infrastructure/charts/*.tgz", + "infrastructure/local/.env" ], - "message": "chore(release): cut atala prism ${nextRelease.version} release\n\n${nextRelease.notes}\n\nSigned-off-by: Anton Baliasnikov " + "message": "chore(release): cut open enterprise agent ${nextRelease.version} release\n\n${nextRelease.notes}\n\nSigned-off-by: Anton Baliasnikov " } ], [ @@ -116,7 +123,7 @@ "notifyOnFail": true, "markdownReleaseNotes": true, "onSuccessTemplate": { - "text": "A new version of Atala PRISM successfully released!\nVersion: `$npm_package_version`\nTag: $repo_url/releases/tag/$npm_package_version\n\nRelease notes:\n$release_notes" + "text": "A new version of Open Enterprise Agent successfully released!\nVersion: `$npm_package_version`\nTag: $repo_url/releases/tag/$npm_package_version\n\nRelease notes:\n$release_notes" } } ]