diff --git a/.github/workflows/flow-gcs-test.yaml b/.github/workflows/flow-gcs-test.yaml
new file mode 100644
index 000000000..425e24e97
--- /dev/null
+++ b/.github/workflows/flow-gcs-test.yaml
@@ -0,0 +1,113 @@
+##
+# Copyright (C) 2023-2024 Hedera Hashgraph, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+name: "Test GCS as bucket storage"
+
+on:
+  workflow_dispatch:
+  workflow_call:
+
+defaults:
+  run:
+    shell: bash
+
+permissions:
+  id-token: write
+  contents: read
+  actions: read
+
+jobs:
+  gcs-storage-test:
+    timeout-minutes: 20
+    runs-on: solo-linux-large
+    strategy:
+      matrix:
+        storageType: ["gcs_only", "gcs_and_minio"]
+    steps:
+      - name: Harden Runner
+        uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
+        with:
+          egress-policy: audit
+
+      - name: Checkout Code
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Authenticate to Google Cloud
+        id: google-auth
+        uses: google-github-actions/auth@6fc4af4b145ae7821d527454aa9bd537d1f2dc5f # v2.1.7
+        with:
+          workload_identity_provider: "projects/652966097426/locations/global/workloadIdentityPools/solo-bucket-dev-pool/providers/gh-provider"
+          service_account: "solo-bucket-reader-writer@solo-bucket-dev.iam.gserviceaccount.com"
+
+      - name: Setup Google Cloud SDK
+        uses: google-github-actions/setup-gcloud@6189d56e4096ee891640bb02ac264be376592d6a # v2.1.2
+
+      - name: Get Current Job Log URL
+        uses: Tiryoh/gha-jobid-action@v1
+        id: jobs
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          job_name: "gcs-storage-test (${{ matrix.storageType }})"
+
+      - name: Create GCS bucket
+        # create a new bucket and use job runner id as prefix
+        run: |
+          export BUCKET_NAME=${{ steps.jobs.outputs.job_id }}-solo-streams
+          gcloud storage buckets create gs://${BUCKET_NAME} --project=${{ vars.GCP_S3_PROJECT_ID }}
+          echo "BUCKET_NAME=${BUCKET_NAME}" >> $GITHUB_ENV
+
+      - name: Setup Node
+        uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+        with:
+          node-version: 20
+          cache: npm
+
+      - name: Setup Helm
+        uses: azure/setup-helm@fe7b79cd5ee1e45176fcad797de68ecaf3ca4814 # v4.2.0
+        with:
+          version: "v3.12.3" # helm version
+
+      - name: Setup Kind
+        uses: helm/kind-action@0025e74a8c7512023d06dc019c617aa3cf561fde # v1.10.0
+        with:
+          install_only: true
+          node_image: kindest/node:v1.27.3@sha256:3966ac761ae0136263ffdb6cfd4db23ef8a83cba8a463690e98317add2c9ba72
+          version: v0.21.0
+          kubectl_version: v1.28.6
+          verbosity: 3
+          wait: 120s
+
+      - name: Install Dependencies
+        id: npm-deps
+        run: |
+          npm ci
+          npm install -g @hashgraph/solo
+
+      - name: Compile Project
+        run: npm run build
+
+      - name: Run GCS Test Script for type ${{ matrix.storageType }}
+        env:
+          GCS_ACCESS_KEY: ${{ secrets.GCP_S3_ACCESS_KEY }}
+          GCS_SECRET_KEY: ${{ secrets.GCP_S3_SECRET_KEY }}
+          BUCKET_NAME: ${{ env.BUCKET_NAME }}
+          STORAGE_TYPE: ${{ matrix.storageType }}
+        run: |
+          .github/workflows/script/gcs_test.sh
+
+      - name: Delete Bucket after Test
+        run: |
+          gcloud storage rm --recursive gs://${BUCKET_NAME} --project=${{ vars.GCP_S3_PROJECT_ID }}
diff --git a/.github/workflows/script/gcs_test.sh b/.github/workflows/script/gcs_test.sh
new file mode 100755
index 000000000..1a773134c
--- /dev/null
+++ b/.github/workflows/script/gcs_test.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+set -eo pipefail
+
+source .github/workflows/script/helper.sh
+
+if [ -z "${GCS_ACCESS_KEY}" ]; then
+  echo "GCS_ACCESS_KEY is not set. Exiting..."
+  exit 1
+fi
+
+if [ -z "${GCS_SECRET_KEY}" ]; then
+  echo "GCS_SECRET_KEY is not set. Exiting..."
+  exit 1
+fi
+
+if [ -z "${BUCKET_NAME}" ]; then
+  streamBucket="solo-ci-test-streams"
+else
+  streamBucket=${BUCKET_NAME}
+fi
+
+if [ -z "${STORAGE_TYPE}" ]; then
+  storageType="gcs_and_minio"
+else
+  storageType=${STORAGE_TYPE}
+fi
+
+echo "Using bucket name: ${streamBucket}"
+echo "Test storage type: ${storageType}"
+
+SOLO_CLUSTER_NAME=solo-e2e
+SOLO_NAMESPACE=solo-e2e
+SOLO_CLUSTER_SETUP_NAMESPACE=solo-setup
+
+kind delete cluster -n "${SOLO_CLUSTER_NAME}"
+kind create cluster -n "${SOLO_CLUSTER_NAME}"
+npm run solo-test -- init
+npm run solo-test -- cluster setup \
+  -s "${SOLO_CLUSTER_SETUP_NAMESPACE}"
+npm run solo-test -- node keys --gossip-keys --tls-keys -i node1
+npm run solo-test -- network deploy -i node1 -n "${SOLO_NAMESPACE}" \
+  --storage-endpoint "https://storage.googleapis.com" \
+  --storage-access-key "${GCS_ACCESS_KEY}" --storage-secrets "${GCS_SECRET_KEY}" \
+  --storage-type "${storageType}" --storage-bucket "${streamBucket}"
+
+npm run solo-test -- node setup -i node1 -n "${SOLO_NAMESPACE}"
+npm run solo-test -- node start -i node1 -n "${SOLO_NAMESPACE}"
+npm run solo-test -- mirror-node deploy --namespace "${SOLO_NAMESPACE}" \
+  --storage-endpoint "https://storage.googleapis.com" \
+  --storage-access-key "${GCS_ACCESS_KEY}" --storage-secrets "${GCS_SECRET_KEY}" \
+  --storage-type "${storageType}" --storage-bucket "${streamBucket}"
+
+kubectl port-forward -n "${SOLO_NAMESPACE}" svc/haproxy-node1-svc 50211:50211 > /dev/null 2>&1 &
+kubectl port-forward -n "${SOLO_NAMESPACE}" svc/hedera-explorer 8080:80 > /dev/null 2>&1 &
+
+cd ..; create_test_account ; cd -
+
+node examples/create-topic.js
+
+npm run solo-test -- node stop -i node1 -n "${SOLO_NAMESPACE}"
diff --git a/.github/workflows/script/helper.sh b/.github/workflows/script/helper.sh
new file mode 100644
index 000000000..ffed2745d
--- /dev/null
+++ b/.github/workflows/script/helper.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+set -eo pipefail
+
+function create_test_account ()
+{
+    echo "Create test account with solo network"
+    cd solo
+
+    # create new account and extract account id
+    npm run solo-test -- account create -n solo-e2e --hbar-amount 100 --generate-ecdsa-key --set-alias > test.log
+    export OPERATOR_ID=$(grep "accountId" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
+    echo "OPERATOR_ID=${OPERATOR_ID}"
+    rm test.log
+
+    # get private key of the account
+    npm run solo-test -- account get -n solo-e2e --account-id ${OPERATOR_ID} --private-key > test.log
+    export OPERATOR_KEY=$(grep "privateKey" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
+    export CONTRACT_TEST_KEY_ONE=0x$(grep "privateKeyRaw" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
+    echo "CONTRACT_TEST_KEY_ONE=${CONTRACT_TEST_KEY_ONE}"
+    rm test.log
+
+    npm run solo-test -- account create -n solo-e2e --hbar-amount 100 --generate-ecdsa-key --set-alias > test.log
+    export SECOND_KEY=$(grep "accountId" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
+    npm run solo-test -- account get -n solo-e2e --account-id ${SECOND_KEY} --private-key > test.log
+    export CONTRACT_TEST_KEY_TWO=0x$(grep "privateKeyRaw" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
+    echo "CONTRACT_TEST_KEY_TWO=${CONTRACT_TEST_KEY_TWO}"
+    rm test.log
+
+    export CONTRACT_TEST_KEYS=${CONTRACT_TEST_KEY_ONE},$'\n'${CONTRACT_TEST_KEY_TWO}
+    export HEDERA_NETWORK="local-node"
+
+    echo "OPERATOR_KEY=${OPERATOR_KEY}"
+    echo "HEDERA_NETWORK=${HEDERA_NETWORK}"
+    echo "CONTRACT_TEST_KEYS=${CONTRACT_TEST_KEYS}"
+
+    cd -
+}
diff --git a/.github/workflows/script/solo_smoke_test.sh b/.github/workflows/script/solo_smoke_test.sh
index b7b0f17fc..199d714a4 100755
--- a/.github/workflows/script/solo_smoke_test.sh
+++ b/.github/workflows/script/solo_smoke_test.sh
@@ -9,7 +9,7 @@ set -eo pipefail
 # Then run smart contract test, and also javascript sdk sample test to interact with solo network
 #
 
-function_name=""
+source .github/workflows/script/helper.sh
 
 function enable_port_forward ()
 {
@@ -73,41 +73,6 @@ function start_contract_test ()
     return $result
 }
 
-function create_test_account ()
-{
-    echo "Create test account with solo network"
-    cd solo
-
-    # create new account and extract account id
-    npm run solo-test -- account create -n solo-e2e --hbar-amount 100 --generate-ecdsa-key --set-alias > test.log
-    export OPERATOR_ID=$(grep "accountId" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
-    echo "OPERATOR_ID=${OPERATOR_ID}"
-    rm test.log
-
-    # get private key of the account
-    npm run solo-test -- account get -n solo-e2e --account-id ${OPERATOR_ID} --private-key > test.log
-    export OPERATOR_KEY=$(grep "privateKey" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
-    export CONTRACT_TEST_KEY_ONE=0x$(grep "privateKeyRaw" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
-    echo "CONTRACT_TEST_KEY_ONE=${CONTRACT_TEST_KEY_ONE}"
-    rm test.log
-
-    npm run solo-test -- account create -n solo-e2e --hbar-amount 100 --generate-ecdsa-key --set-alias > test.log
-    export SECOND_KEY=$(grep "accountId" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
-    npm run solo-test -- account get -n solo-e2e --account-id ${SECOND_KEY} --private-key > test.log
-    export CONTRACT_TEST_KEY_TWO=0x$(grep "privateKeyRaw" test.log | awk '{print $2}' | sed 's/"//g'| sed 's/,//g')
-    echo "CONTRACT_TEST_KEY_TWO=${CONTRACT_TEST_KEY_TWO}"
-    rm test.log
-
-    export CONTRACT_TEST_KEYS=${CONTRACT_TEST_KEY_ONE},$'\n'${CONTRACT_TEST_KEY_TWO}
-    export HEDERA_NETWORK="local-node"
-
-    echo "OPERATOR_KEY=${OPERATOR_KEY}"
-    echo "HEDERA_NETWORK=${HEDERA_NETWORK}"
-    echo "CONTRACT_TEST_KEYS=${CONTRACT_TEST_KEYS}"
-
-    cd -
-}
-
 function start_sdk_test ()
 {
     cd solo
diff --git a/examples/create-topic.js b/examples/create-topic.js
index f44a957a3..605d3759d 100644
--- a/examples/create-topic.js
+++ b/examples/create-topic.js
@@ -14,9 +14,18 @@
 * limitations under the License.
 *
 */
 
-import {Wallet, LocalProvider, TopicCreateTransaction, TopicMessageSubmitTransaction} from '@hashgraph/sdk';
+import {
+  Wallet,
+  LocalProvider,
+  TopicCreateTransaction,
+  TopicMessageSubmitTransaction,
+  AccountCreateTransaction,
+  PrivateKey,
+  Hbar,
+} from '@hashgraph/sdk';
 
 import dotenv from 'dotenv';
+import http from 'http';
 
 dotenv.config();
@@ -30,12 +39,11 @@ async function main() {
 
   const wallet = new Wallet(process.env.OPERATOR_ID, process.env.OPERATOR_KEY, provider);
 
+  const TEST_MESSAGE = 'Hello World';
   try {
-    console.log('before create topic');
     // create topic
     let transaction = await new TopicCreateTransaction().freezeWithSigner(wallet);
     transaction = await transaction.signWithSigner(wallet);
-    console.log('after sign transaction');
    const createResponse = await transaction.executeWithSigner(wallet);
    const createReceipt = await createResponse.getReceiptWithSigner(wallet);
 
@@ -44,7 +52,7 @@ async function main() {
     // send one message
     let topicMessageSubmitTransaction = await new TopicMessageSubmitTransaction({
       topicId: createReceipt.topicId,
-      message: 'Hello World',
+      message: TEST_MESSAGE,
     }).freezeWithSigner(wallet);
     topicMessageSubmitTransaction = await topicMessageSubmitTransaction.signWithSigner(wallet);
     const sendResponse = await topicMessageSubmitTransaction.executeWithSigner(wallet);
@@ -52,10 +60,65 @@ async function main() {
 
     const sendReceipt = await sendResponse.getReceiptWithSigner(wallet);
     console.log(`topic sequence number = ${sendReceipt.topicSequenceNumber.toString()}`);
+
+    await new Promise(resolve => setTimeout(resolve, 1000));
+
+    // send a create account transaction to push record stream files to mirror node
+    const newKey = PrivateKey.generate();
+    let accountCreateTransaction = await new AccountCreateTransaction()
+      .setInitialBalance(new Hbar(10))
+      .setKey(newKey.publicKey)
+      .freezeWithSigner(wallet);
+    accountCreateTransaction = await accountCreateTransaction.signWithSigner(wallet);
+    const accountCreationResponse = await accountCreateTransaction.executeWithSigner(wallet);
+    const accountCreationReceipt = await accountCreationResponse.getReceiptWithSigner(wallet);
+    console.log(`account id = ${accountCreationReceipt.accountId.toString()}`);
+
+    await new Promise(resolve => setTimeout(resolve, 1000));
+
+    // check that the submitted message can be read back through the mirror node REST API
+    const queryURL = `http://localhost:8080/api/v1/topics/${createReceipt.topicId}/messages`;
+    let received = false;
+    let receivedMessage = '';
+
+    // wait until the transaction has reached consensus and is retrievable from the mirror node API
+    let retry = 0;
+    while (!received && retry < 10) {
+      const req = http.request(queryURL, {method: 'GET', timeout: 100, headers: {Connection: 'close'}}, res => {
+        res.setEncoding('utf8');
+        res.on('data', chunk => {
+          // convert chunk to json object
+          const obj = JSON.parse(chunk);
+          if (obj.messages.length === 0) {
+            console.log('No messages yet');
+          } else {
+            // convert message from base64 to utf-8
+            const base64 = obj.messages[0].message;
+            const buff = Buffer.from(base64, 'base64');
+            receivedMessage = buff.toString('utf-8');
+            console.log(`Received message: ${receivedMessage}`);
+            received = true;
+          }
+        });
+      });
+      req.on('error', e => {
+        console.log(`problem with request: ${e.message}`);
+      });
+      req.end(); // make the request
+      // wait and try again
+      await new Promise(resolve => setTimeout(resolve, 1000));
+      retry++;
+    }
+    if (receivedMessage === TEST_MESSAGE) {
+      console.log('Message received successfully');
+    } else {
+      console.error('Received message does not match the expected message: ' + receivedMessage);
+      // eslint-disable-next-line n/no-process-exit
+      process.exit(1);
+    }
   } catch (error) {
     console.error(error);
   }
-
   provider.close();
 }
 
diff --git a/src/commands/flags.ts b/src/commands/flags.ts
index e73cee854..3c57fedad 100644
--- a/src/commands/flags.ts
+++ b/src/commands/flags.ts
@@ -1619,6 +1619,62 @@ export class Flags {
     prompt: undefined,
   };
 
+  static readonly storageType: CommandFlag = {
+    constName: 'storageType',
+    name: 'storage-type',
+    definition: {
+      defaultValue: constants.StorageType.MINIO_ONLY,
+      describe:
+        'storage type for saving stream files, available options are minio_only, gcs_and_minio, s3_only, gcs_only, s3_and_gcs',
+      type: 'StorageType',
+    },
+    prompt: undefined,
+  };
+
+  static readonly storageAccessKey: CommandFlag = {
+    constName: 'storageAccessKey',
+    name: 'storage-access-key',
+    definition: {
+      defaultValue: '',
+      describe: 'storage access key',
+      type: 'string',
+    },
+    prompt: undefined,
+  };
+
+  static readonly storageSecrets: CommandFlag = {
+    constName: 'storageSecrets',
+    name: 'storage-secrets',
+    definition: {
+      defaultValue: '',
+      describe: 'storage secret key',
+      type: 'string',
+    },
+    prompt: undefined,
+  };
+
+  static readonly storageEndpoint: CommandFlag = {
+    constName: 'storageEndpoint',
+    name: 'storage-endpoint',
+    definition: {
+      defaultValue: '',
+      describe: 'storage endpoint URL',
+      type: 'string',
+    },
+    prompt: undefined,
+  };
+
+  static readonly storageBucket: CommandFlag = {
+    constName: 'storageBucket',
+    name: 'storage-bucket',
+    definition: {
+      defaultValue: '',
+      describe: 'name of storage bucket',
+      type: 'string',
+    },
+    prompt: undefined,
+  };
+
   static readonly allFlags: CommandFlag[] = [
     Flags.accountId,
     Flags.amount,
@@ -1696,6 +1752,11 @@ export class Flags {
     Flags.soloChartVersion,
     Flags.stakeAmounts,
     Flags.stateFile,
+    Flags.storageType,
+    Flags.storageAccessKey,
+    Flags.storageSecrets,
+    Flags.storageEndpoint,
+    Flags.storageBucket,
     Flags.tlsClusterIssuerType,
     Flags.tlsPrivateKey,
     Flags.tlsPublicKey,
diff --git a/src/commands/mirror_node.ts b/src/commands/mirror_node.ts
index 937ab6790..b028d16ab 100644
--- a/src/commands/mirror_node.ts
+++ b/src/commands/mirror_node.ts
@@ -51,6 +51,11 @@ interface MirrorNodeDeployConfigClass {
   clusterSetupNamespace: string;
   soloChartVersion: string;
   pinger: boolean;
+  storageType: constants.StorageType;
+  storageAccessKey: string;
+  storageSecrets: string;
+  storageEndpoint: string;
+  storageBucket: string;
 }
 
 interface Context {
@@ -95,6 +100,11 @@ export class MirrorNodeCommand extends BaseCommand {
       flags.pinger,
       flags.clusterSetupNamespace,
      flags.soloChartVersion,
+      flags.storageType,
+      flags.storageAccessKey,
+      flags.storageSecrets,
+      flags.storageEndpoint,
+      flags.storageBucket,
     ];
   }
 
@@ -163,7 +173,7 @@ export class MirrorNodeCommand extends BaseCommand {
     return valuesArg;
   }
 
-  async prepareValuesArg(config: {valuesFile: string}) {
+  async prepareValuesArg(config: MirrorNodeDeployConfigClass) {
     let valuesArg = '';
 
     const profileName = this.configManager.getFlag(flags.profileName) as string;
@@ -176,6 +186,28 @@ export class MirrorNodeCommand extends BaseCommand {
       valuesArg += this.prepareValuesFiles(config.valuesFile);
     }
 
+    if (config.storageBucket) {
+      valuesArg += ` --set importer.config.hedera.mirror.importer.downloader.bucketName=${config.storageBucket}`;
+    }
+
+    let storageType = '';
+    if (config.storageType && config.storageAccessKey && config.storageSecrets && config.storageEndpoint) {
+      if (
+        config.storageType === constants.StorageType.GCS_ONLY ||
+        config.storageType === constants.StorageType.S3_AND_GCS ||
+        config.storageType === constants.StorageType.GCS_AND_MINIO
+      ) {
+        storageType = 'gcp';
+      } else if (config.storageType === constants.StorageType.S3_ONLY) {
+        storageType = 's3';
+      } else {
+        throw new IllegalArgumentError(`Invalid cloud storage type: ${config.storageType}`);
+      }
+      valuesArg += ` --set importer.env.HEDERA_MIRROR_IMPORTER_DOWNLOADER_SOURCES_0_TYPE=${storageType}`;
+      valuesArg += ` --set importer.env.HEDERA_MIRROR_IMPORTER_DOWNLOADER_SOURCES_0_URI=${config.storageEndpoint}`;
+      valuesArg += ` --set importer.env.HEDERA_MIRROR_IMPORTER_DOWNLOADER_SOURCES_0_CREDENTIALS_ACCESSKEY=${config.storageAccessKey}`;
+      valuesArg += ` --set importer.env.HEDERA_MIRROR_IMPORTER_DOWNLOADER_SOURCES_0_CREDENTIALS_SECRETKEY=${config.storageSecrets}`;
+    }
     return valuesArg;
   }
 
@@ -217,9 +249,10 @@
             constants.MIRROR_NODE_CHART,
           );
 
-          ctx.config.valuesArg = await self.prepareValuesArg(ctx.config);
-
+          // predefined values first
           ctx.config.valuesArg += this.prepareValuesFiles(constants.MIRROR_NODE_VALUES_FILE);
+          // user defined values later to override predefined values
+          ctx.config.valuesArg += await self.prepareValuesArg(ctx.config);
 
           if (ctx.config.pinger) {
             const startAccId = constants.HEDERA_NODE_ACCOUNT_ID_START;
diff --git a/src/commands/network.ts b/src/commands/network.ts
index b3fd759f9..8488a930a 100644
--- a/src/commands/network.ts
+++ b/src/commands/network.ts
@@ -39,6 +39,8 @@ import {ConsensusNodeStates} from '../core/config/remote/enumerations.js';
 import {EnvoyProxyComponent} from '../core/config/remote/components/envoy_proxy_component.js';
 import {HaProxyComponent} from '../core/config/remote/components/ha_proxy_component.js';
 import {GenesisNetworkDataConstructor} from '../core/genesis_network_models/genesis_network_data_constructor.js';
+import {v4 as uuidv4} from 'uuid';
+import * as Base64 from 'js-base64';
 
 export interface NetworkDeployConfigClass {
   applicationEnv: string;
@@ -70,6 +72,11 @@ export interface NetworkDeployConfigClass {
   envoyIps: string;
   haproxyIpsParsed?: Record;
   envoyIpsParsed?: Record;
+  storageType: constants.StorageType;
+  storageAccessKey: string;
+  storageSecrets: string;
+  storageEndpoint: string;
+  storageBucket: string;
 }
 
 export class NetworkCommand extends BaseCommand {
@@ -132,9 +139,81 @@ export class NetworkCommand extends BaseCommand {
       flags.grpcWebTlsKeyPath,
       flags.haproxyIps,
       flags.envoyIps,
+      flags.storageType,
+      flags.storageAccessKey,
+      flags.storageSecrets,
+      flags.storageEndpoint,
+      flags.storageBucket,
     ];
   }
 
+  async prepareStorageSecrets(config: NetworkDeployConfigClass) {
+    try {
+      const minioAccessKey = uuidv4();
+      const minioSecretKey = uuidv4();
+      const minioData = {};
+      const namespace = config.namespace;
+
+      // Generating new minio credentials
+      const envString = `MINIO_ROOT_USER=${minioAccessKey}\nMINIO_ROOT_PASSWORD=${minioSecretKey}`;
+      minioData['config.env'] = Base64.encode(envString);
+      const isMinioSecretCreated = await this.k8.createSecret(
+        constants.MINIO_SECRET_NAME,
+        namespace,
+        'Opaque',
+        minioData,
+        undefined,
+        true,
+      );
+      if (!isMinioSecretCreated) {
+        throw new SoloError('failed to create new minio secret');
+      }
+
+      // Generating cloud storage secrets
+      const {storageAccessKey, storageSecrets, storageEndpoint} = config;
+      const cloudData = {};
+      if (
+        config.storageType === constants.StorageType.S3_ONLY ||
+        config.storageType === constants.StorageType.S3_AND_GCS
+      ) {
+        cloudData['S3_ACCESS_KEY'] = Base64.encode(storageAccessKey);
+        cloudData['S3_SECRET_KEY'] = Base64.encode(storageSecrets);
+        cloudData['S3_ENDPOINT'] = Base64.encode(storageEndpoint);
+      }
+      if (
+        config.storageType === constants.StorageType.GCS_ONLY ||
+        config.storageType === constants.StorageType.S3_AND_GCS ||
+        config.storageType === constants.StorageType.GCS_AND_MINIO
+      ) {
+        cloudData['GCS_ACCESS_KEY'] = Base64.encode(storageAccessKey);
+        cloudData['GCS_SECRET_KEY'] = Base64.encode(storageSecrets);
+        cloudData['GCS_ENDPOINT'] = Base64.encode(storageEndpoint);
+      }
+      if (config.storageType === constants.StorageType.GCS_AND_MINIO) {
+        cloudData['S3_ACCESS_KEY'] = Base64.encode(minioAccessKey);
+        cloudData['S3_SECRET_KEY'] = Base64.encode(minioSecretKey);
+      }
+
+      const isCloudSecretCreated = await this.k8.createSecret(
+        constants.UPLOADER_SECRET_NAME,
+        namespace,
+        'Opaque',
+        cloudData,
+        undefined,
+        true,
+      );
+      if (!isCloudSecretCreated) {
+        throw new SoloError(
+          `failed to create Kubernetes secret for storage credentials of type '${config.storageType}'`,
+        );
+      }
+    } catch (e: Error | any) {
+      const errorMessage = 'failed to create Kubernetes storage secret ';
+      this.logger.error(errorMessage, e);
+      throw new SoloError(errorMessage, e);
+    }
+  }
+
   async prepareValuesArg(config: {
     chartDirectory?: string;
     app?: string;
@@ -147,7 +226,12 @@
     haproxyIpsParsed?: Record;
     envoyIpsParsed?: Record;
     genesisNetworkData: GenesisNetworkDataConstructor;
+    storageType: constants.StorageType;
     resolvedThrottlesFile: string;
+    storageAccessKey: string;
+    storageSecrets: string;
+    storageEndpoint: string;
+    storageBucket: string;
   }) {
     let valuesArg = config.chartDirectory
       ? `-f ${path.join(config.chartDirectory, 'solo-deployment', 'values.yaml')}`
@@ -164,6 +248,37 @@
       valuesArg = addDebugOptions(valuesArg, config.debugNodeAlias);
     }
 
+    if (
+      config.storageType === constants.StorageType.S3_AND_GCS ||
+      config.storageType === constants.StorageType.GCS_ONLY ||
+      config.storageType === constants.StorageType.GCS_AND_MINIO
+    ) {
+      valuesArg += ' --set cloud.gcs.enabled=true';
+    }
+
+    if (
+      config.storageType === constants.StorageType.S3_AND_GCS ||
+      config.storageType === constants.StorageType.S3_ONLY
+    ) {
+      valuesArg += ' --set cloud.s3.enabled=true';
+    }
+
+    if (
+      config.storageType === constants.StorageType.GCS_ONLY ||
+      config.storageType === constants.StorageType.S3_ONLY ||
+      config.storageType === constants.StorageType.S3_AND_GCS
+    ) {
+      valuesArg += ' --set cloud.minio.enabled=false';
+    }
+
+    if (config.storageType !== constants.StorageType.MINIO_ONLY) {
+      valuesArg += ' --set cloud.generateNewSecrets=false';
+    }
+
+    if (config.storageBucket) {
+      valuesArg += ` --set cloud.buckets.streamBucket=${config.storageBucket}`;
+      valuesArg += ` --set minio-server.tenant.buckets[0].name=${config.storageBucket}`;
+    }
     const profileName = this.configManager.getFlag(flags.profileName) as string;
     this.profileValuesFile = await this.profileManager.prepareValuesForSoloChart(
       profileName,
@@ -235,6 +350,11 @@
         flags.grpcWebTlsKeyPath,
         flags.haproxyIps,
         flags.envoyIps,
+        flags.storageType,
+        flags.storageAccessKey,
+        flags.storageSecrets,
+        flags.storageEndpoint,
+        flags.storageBucket,
       ]);
 
       await this.configManager.executePrompt(task, NetworkCommand.DEPLOY_FLAGS_LIST);
@@ -299,6 +419,17 @@ if (!fs.existsSync(ctx.config.keysDir)) {
       fs.mkdirSync(config.keysDir);
     }
 
+    // if storageType is set, then we need to set the storage secrets
+    if (
+      this.configManager.getFlag(flags.storageType) &&
+      this.configManager.getFlag(flags.storageAccessKey) &&
+      this.configManager.getFlag(flags.storageSecrets) &&
+      this.configManager.getFlag(flags.storageEndpoint)
+    ) {
+      this.logger.debug('Preparing storage secrets');
+      await this.prepareStorageSecrets(config);
+    }
+
     this.logger.debug('Prepared config', {
       config,
       cachedConfig: this.configManager.config,
@@ -525,6 +656,11 @@
             constants.PODS_RUNNING_MAX_ATTEMPTS,
             constants.PODS_RUNNING_DELAY,
           ),
+        // skip when only cloud storage is used
+        skip: ctx =>
+          ctx.config.storageType === constants.StorageType.GCS_ONLY ||
+          ctx.config.storageType === constants.StorageType.S3_ONLY ||
+          ctx.config.storageType === constants.StorageType.S3_AND_GCS,
       });
 
       // set up the subtasks
diff --git a/src/core/config_manager.ts b/src/core/config_manager.ts
index 178264200..168ea14f7 100644
--- a/src/core/config_manager.ts
+++ b/src/core/config_manager.ts
@@ -24,6 +24,7 @@ import type * as yargs from 'yargs';
 import {type CommandFlag} from '../types/flag_types.js';
 import {type ListrTaskWrapper} from 'listr2';
 import {patchInject} from './container_helper.js';
+import * as constants from '../core/constants.js';
 
 /**
  * ConfigManager cache command flag values so that user doesn't need to enter the same values repeatedly.
@@ -110,6 +111,14 @@ export class ConfigManager {
           this.config.flags[flag.name] = val === true || val === 'true'; // use comparison to enforce boolean value
           break;
 
+        case 'StorageType':
+          // @ts-ignore
+          if (!Object.values(constants.StorageType).includes(`${val}`)) {
+            throw new SoloError(`Invalid storage type value '${val}'`);
+          } else {
+            this.config.flags[flag.name] = val;
+          }
+          break;
         default:
           throw new SoloError(`Unsupported field type for flag '${flag.name}': ${flag.definition.type}`);
       }
diff --git a/src/core/constants.ts b/src/core/constants.ts
index c87e765e6..75d8c4ad8 100644
--- a/src/core/constants.ts
+++ b/src/core/constants.ts
@@ -202,3 +202,14 @@
 export const DEFAULT_LOCAL_CONFIG_FILE = 'local-config.yaml';
 
 export const IGNORED_NODE_ACCOUNT_ID = '0.0.0';
+
+export const UPLOADER_SECRET_NAME = 'uploader-mirror-secrets';
+export const MINIO_SECRET_NAME = 'minio-secrets';
+
+export const enum StorageType {
+  MINIO_ONLY = 'minio_only',
+  GCS_AND_MINIO = 'gcs_and_minio',
+  S3_ONLY = 's3_only',
+  GCS_ONLY = 'gcs_only',
+  S3_AND_GCS = 's3_and_gcs',
+}
diff --git a/src/core/k8.ts b/src/core/k8.ts
index 214058d2b..ea7a8b6f0 100644
--- a/src/core/k8.ts
+++ b/src/core/k8.ts
@@ -1297,7 +1297,7 @@ export class K8 {
 
   /**
    * Delete a secret from the namespace
-   * @param name - the name of the new secret
+   * @param name - the name of the existing secret
    * @param namespace - the namespace to store the secret
    * @returns whether the secret was deleted successfully
    */
diff --git a/test/e2e/commands/mirror_node.test.ts b/test/e2e/commands/mirror_node.test.ts
index bca6b4c58..b6b050310 100644
--- a/test/e2e/commands/mirror_node.test.ts
+++ b/test/e2e/commands/mirror_node.test.ts
@@ -101,6 +101,8 @@ e2eTestSuite(testName, argv, undefined, undefined, undefined, undefined, undefin
        flags.tlsClusterIssuerType.constName,
        flags.clusterSetupNamespace.constName,
        flags.soloChartVersion.constName,
+        flags.storageSecrets.constName,
+        flags.storageEndpoint.constName,
      ]);
    }).timeout(Duration.ofMinutes(10).toMillis());
diff --git a/test/e2e/commands/network.test.ts b/test/e2e/commands/network.test.ts
index 2a8952081..4c5acba64 100644
--- a/test/e2e/commands/network.test.ts
+++ b/test/e2e/commands/network.test.ts
@@ -102,6 +102,9 @@ describe('NetworkCommand', () => {
         flags.settingTxt.constName,
         flags.grpcTlsKeyPath.constName,
         flags.grpcWebTlsKeyPath.constName,
+        flags.storageAccessKey.constName,
+        flags.storageSecrets.constName,
+        flags.storageEndpoint.constName,
       ]);
     } catch (e) {
       networkCmd.logger.showUserError(e);
diff --git a/test/test_util.ts b/test/test_util.ts
index 2a43abb5a..e271c4a52 100644
--- a/test/test_util.ts
+++ b/test/test_util.ts
@@ -289,6 +289,9 @@ export function e2eTestSuite(
         flags.settingTxt.constName,
         flags.grpcTlsKeyPath.constName,
         flags.grpcWebTlsKeyPath.constName,
+        flags.storageAccessKey.constName,
+        flags.storageSecrets.constName,
+        flags.storageEndpoint.constName,
       ]);
     }).timeout(Duration.ofMinutes(5).toMillis());
 
diff --git a/version.ts b/version.ts
index 0a3e1eef4..70e9fea32 100644
--- a/version.ts
+++ b/version.ts
@@ -20,7 +20,7 @@
  */
 
 export const HELM_VERSION = 'v3.14.2';
-export const SOLO_CHART_VERSION = '0.39.0';
+export const SOLO_CHART_VERSION = '0.40.0';
 export const HEDERA_PLATFORM_VERSION = 'v0.58.1';
 export const MIRROR_NODE_VERSION = '0.118.1';
 export const HEDERA_EXPLORER_VERSION = '0.2.1';