Relay upgrade #373

Merged: 21 commits, Sep 17, 2024

Changes from 5 commits

Commits (21):
e287fe0
refactor(relay): Rename relay setup script to a more descriptive name
kkirkov Jul 29, 2024
1f1b0af
refactor(relay): Add better descriptions to process-compose tasks
kkirkov Jul 29, 2024
98d51ee
refactor(relay): Add ZKEY_DOWNLOAD_URL env var for faster download
kkirkov Jul 29, 2024
04a5c14
feat(relay): Use readiness_probe for proverserver not check-proverser…
kkirkov Jul 29, 2024
2cfa3f1
feat(relay): Log link to transaction
kkirkov Jul 29, 2024
44f34d0
refactor(relay): Add union type `NetworkConfig`
kkirkov Aug 5, 2024
bc0af24
fix(tsconfig): Fix import in a ts file by adding a json to `tsconfig.…
kkirkov Aug 5, 2024
08facde
refactor(env): Remove `getRootDir` func and use `rootDir` everywhere
kkirkov Aug 5, 2024
ecb56d0
refactor(ssz-utils): Add return type to `computeSyncCommitteePeriodAt
kkirkov Aug 5, 2024
88d7573
refactor(relay): Start proverServer from process-compose directly
kkirkov Aug 16, 2024
3b4bee4
refactor(relay): Run redis from the correct ignored folder
kkirkov Aug 26, 2024
d414b19
refactor(relay): Remove `run-relay.sh` - use process-compose only dir…
kkirkov Aug 26, 2024
2686ad2
refactor(relay): Make relayer work when `REDIS_HOST` and `REDIS_PORT` …
kkirkov Aug 27, 2024
087c6aa
feat(relay): Update relay to work with finalized header DendrethAdada…
Dimo99 Aug 27, 2024
6e6e2e1
feat: Update to the new snarkjs verifier
Dimo99 Sep 4, 2024
c9e6ce4
refactor(relay): Use `follow-network` instead of `network` in logs
kkirkov Sep 5, 2024
85631f0
refactor(.env): Update `.env.example`
kkirkov Sep 10, 2024
88976d9
feat(relay): Add script that msgs a discord channel if a contract is …
kkirkov Sep 11, 2024
644f95d
fix(tests): Update BeaconLightClient tests
Dimo99 Sep 13, 2024
38a6a83
fix(discord-bot): Tag everyone when alerting
Dimo99 Sep 13, 2024
150bb83
feat(relay): Introduce additional networks
Dimo99 Sep 13, 2024
87 changes: 46 additions & 41 deletions process-compose.yaml
@@ -1,46 +1,51 @@
processes:
setup:
description: setup
downloadZkAndDatFiles:
description: download .zkey and .dat files and verify their checksums
working_dir: ./relay/process-compose-scripts/
command: ./start-setup.sh

proverserver_check:
description: check if proverserver is live
working_dir: ./relay/process-compose-scripts/
command: ./check-proverserver.sh
depends_on:
setup:
condition: process_completed
command: ./download-zk-and-dat-files.sh

redis:
description: redis
log_location: ./logs/redis.log
working_dir: ./relay/process-compose-scripts/
command: ./start-redis.sh
depends_on:
setup:
downloadZkAndDatFiles:
condition: process_completed
availability:
restart: always

proverserver:
description: proverserver
environment:
- 'HOST = ${PROVER_SERVER_HOST}'
log_location: ./logs/prover_server.log
working_dir: ./relay/process-compose-scripts/
command: ./start-proverserver.sh
depends_on:
setup:
downloadZkAndDatFiles:
condition: process_completed
availability:
restart: always
readiness_probe:
http_get:
host: $HOST
scheme: http
path: '/status'
port: ${PROVER_SERVER_PORT}
initial_delay_seconds: 10
period_seconds: 10
success_threshold: 1
timeout_seconds: 1
failure_threshold: 120

prometheus:
description: prometheus
log_location: ./logs/prometheus.log
command: ./start-prometheus.sh
working_dir: ./relay/process-compose-scripts/
depends_on:
setup:
downloadZkAndDatFiles:
condition: process_completed
availability:
restart: always
@@ -73,86 +78,86 @@ processes:
command: yarn ts ./workers/cleaner.ts
working_dir: ./relay/workers
depends_on:
setup:
downloadZkAndDatFiles:
condition: process_completed

sepolia:
description: sepolia
description: Start generating proofs for the contract on ${LC_SEPOLIA} and update it with them
log_location: ./logs/sepolia.log
command: yarn hardhat start-publishing --light-client ${LC_SEPOLIA} --network sepolia --follow-network ${FOLLOW_NETWORK_SEPOLIA} --slots-jump ${SLOTS_JUMP} --hashi ${SEPOLIA_HASHI} --prometheus-port 3004
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

fantom:
description: fantom
description: Start generating proofs for the contract on ${LC_FANTOM} and update it with them
log_location: ./logs/fantom.log
command: yarn hardhat start-publishing --light-client ${LC_FANTOM} --network fantom --follow-network ${FOLLOW_NETWORK_FANTOM} --slots-jump ${SLOTS_JUMP} --prometheus-port 3007
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

alfajores:
description: alfajores-celo
description: Start generating proofs for the contract on ${LC_ALFAJORES} and update it with them
log_location: ./logs/alfajores.log
command: yarn hardhat start-publishing --light-client ${LC_ALFAJORES} --network celo --follow-network ${FOLLOW_NETWORK_CELO} --slots-jump ${SLOTS_JUMP} --prometheus-port 3008
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

bsc:
description: bsc
description: Start generating proofs for the contract on ${LC_BSC} and update it with them
log_location: ./logs/bsc.log
command: yarn hardhat start-publishing --light-client ${LC_BSC} --network bsc --follow-network ${FOLLOW_NETWORK_BSC} --slots-jump ${SLOTS_JUMP} --prometheus-port 3009
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

chiado:
description: chiado
description: Start generating proofs for the contract on ${LC_CHIADO} and update it with them
log_location: ./logs/chiado.log
command: yarn hardhat start-publishing --light-client ${LC_CHIADO} --network chiado --follow-network ${FOLLOW_NETWORK_CHIADO} --slots-jump ${SLOTS_JUMP} --hashi ${CHIADO_HASHI} --prometheus-port 3010
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

aurora:
description: aurora
description: Start generating proofs for the contract on ${LC_AURORA} and update it with them
log_location: ./logs/aurora.log
command: yarn hardhat start-publishing --light-client ${LC_AURORA} --network aurora --follow-network ${FOLLOW_NETWORK_AURORA} --slots-jump ${SLOTS_JUMP} --prometheus-port 3013
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

gnosis:
description: gnosis
description: Start generating proofs for the contract on ${LC_GNOSIS} and update it with them
log_location: ./logs/gnosis.log
command: yarn hardhat start-publishing --light-client ${LC_GNOSIS} --network gnosis --follow-network ${FOLLOW_NETWORK_GNOSIS} --slots-jump ${SLOTS_JUMP} --prometheus-port 3014
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

lukso:
description: lukso
description: Start generating proofs for the contract on ${LC_LUKSO} and update it with them
log_location: ./logs/lukso.log
command: yarn hardhat start-publishing --light-client ${LC_LUKSO} --network lukso --follow-network ${FOLLOW_NETWORK_LUKSO} --slots-jump ${SLOTS_JUMP} --hashi ${LUKSO_HASHI} --prometheus-port 3015
working_dir: ./beacon-light-client/solidity
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy

general_logs:
description: general_logs
log_location: ./logs/general_logs.log
command: yarn ts relayer_logger.ts
working_dir: ./relay
depends_on:
proverserver_check:
condition: process_completed
proverserver:
condition: process_healthy
27 changes: 27 additions & 0 deletions relay/implementations/publish_evm_transaction.ts
@@ -24,6 +24,7 @@ export async function publishTransaction(
web3: Web3,
transactionSpeed: TransactionSpeed,
spread?: boolean,
chainId?: number,
) {
const transactionCount = await contract.signer.getTransactionCount();

@@ -71,6 +72,32 @@
});
}

switch (chainId) {
Reviewer comment on this switch: "Here you can use the data-driven programming approach as @PetarKirov suggested. But it can be done in a following PR." (A sketch of that approach follows this diff.)

case 11155111: {
console.log(
`A transaction was uploaded, to see it go to: https://sepolia.etherscan.io/tx/${transaction.hash}`,
);
break;
}
case 10200: {
console.log(
`A transaction was uploaded, to see it go to: https://gnosis-chiado.blockscout.com/tx/${transaction.hash}`,
);
break;
}
case 4201: {
console.log(
`A transaction was uploaded, to see it go to: https://explorer.consensus.testnet.lukso.network/tx/${transaction.hash}`,
);
break;
}
default: {
console.log(
`A transaction was uploaded, can't send you to an explorer because I don't have one for chainId: ${chainId}, transaction hash is: ${transaction.hash}`,
);
}
}

logger.info(JSON.stringify(transaction));

transactionPromise = transaction.wait();
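Picking up the reviewer's suggestion above, here is a minimal sketch of the data-driven alternative to the switch. It assumes a plain chain-ID-to-explorer-URL map; the chain IDs and URLs are the ones already handled by the cases above, while the helper name and its exact shape are hypothetical and not part of this PR:

```typescript
// Sketch only (not part of this PR): a lookup table keyed by chain ID replaces
// the switch. The chain IDs and explorer URLs are the ones from the cases above;
// the helper name is hypothetical.
const EXPLORER_TX_URLS: Record<number, string> = {
  11155111: 'https://sepolia.etherscan.io/tx/', // Sepolia
  10200: 'https://gnosis-chiado.blockscout.com/tx/', // Chiado
  4201: 'https://explorer.consensus.testnet.lukso.network/tx/', // LUKSO testnet
};

function logTransactionLink(txHash: string, chainId?: number): void {
  const baseUrl = chainId !== undefined ? EXPLORER_TX_URLS[chainId] : undefined;
  if (baseUrl) {
    console.log(`A transaction was uploaded, to see it go to: ${baseUrl}${txHash}`);
  } else {
    console.log(
      `A transaction was uploaded, no known explorer for chainId: ${chainId}, transaction hash is: ${txHash}`,
    );
  }
}

// Usage at the point where the switch currently lives:
// logTransactionLink(transaction.hash, chainId);
```

With this shape, supporting a new network becomes a one-line addition to the map instead of another case block.
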
3 changes: 3 additions & 0 deletions relay/on_chain_publisher.ts
@@ -184,6 +184,9 @@ export async function postUpdateOnChain(
new Web3(rpcEndpoint!),
transactionSpeed,
true,
(
await hashiAdapterContract.provider.getNetwork()
).chainId,
);
} else {
await lightClientContract.postUpdateOnChain({
19 changes: 0 additions & 19 deletions relay/process-compose-scripts/check-proverserver.sh

This file was deleted.

relay/process-compose-scripts/download-zk-and-dat-files.sh (renamed from start-setup.sh)
@@ -16,31 +16,48 @@ calculate_checksum() {
}

download_zkey_file() {
echo "Downloading zkey file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey ..."

curl https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey >"${GIT_ROOT}/data/light_client.zkey"
if [[ -z "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" ]]; then
echo "Light_Client_ZKEY_DOWNLOAD_LOCATION environment variables are not set. Using default values."
LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION="https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey"
echo "This might take a while as the file is 52G"
else
echo "Using download zkey settings from environment variables"
fi

CALCULATED_ZKEY_SUM=$(calculate_checksum "${GIT_ROOT}/data/light_client.zkey")
echo "Downloading zkey file from "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" ..."

curl "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" >"data/light_client.zkey"

CALCULATED_ZKEY_SUM=$(calculate_checksum data/light_client.zkey)

if [ "${ZKEY_B3SUM_SUM}" = "${CALCULATED_ZKEY_SUM}" ]; then
echo "Zkey file downloaded successfully to ${GIT_ROOT}/data/light_client.zkey"
echo "Zkey file downloaded successfully to data/light_client.zkey"
else
echo "Failed to download zkey file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey"
echo "Failed to download zkey file from "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}
exit 1
fi
}

download_dat_file() {
echo "Downloading .dat file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat ..."

curl -k https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat >"data/light_client.dat"
if [[ -z "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" ]]; then
echo "LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION environment variables are not set. Using default values."
LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION="https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat"
else
echo "Using download dat settings from environment variables"
fi

CALCULATED_DAT_SUM=$(calculate_checksum "${GIT_ROOT}/data/light_client.dat")
echo "Downloading .dat file from "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" ..."

curl -k "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" >"data/light_client.dat"

CALCULATED_DAT_SUM=$(calculate_checksum data/light_client.dat)

if [ "${DAT_B3SUM_SUM}" = "${CALCULATED_DAT_SUM}" ]; then
echo ".dat file downloaded successfully to ${GIT_ROOT}/data/light_client.dat"
echo ".dat file downloaded successfully to data/light_client.dat"
else
echo "Failed to download .dat file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat"
echo "Failed to download .dat file from "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}""
exit 1
fi
}
29 changes: 23 additions & 6 deletions relay/run-relay.sh
Expand Up @@ -13,31 +13,48 @@ calculate_checksum() {
}

download_zkey_file() {
echo "Downloading zkey file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey ..."

curl https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey >"data/light_client.zkey"
if [[ -z "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" ]]; then
echo "Light_Client_ZKEY_DOWNLOAD_LOCATION environment variables are not set. Using default values."
LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION="https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey"
echo "This might take a while as the file is 52G"
else
echo "Using download zkey settings from environment variables"
fi

echo "Downloading zkey file from "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" ..."

curl "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}" >"data/light_client.zkey"

CALCULATED_ZKEY_SUM=$(calculate_checksum data/light_client.zkey)

if [ "${ZKEY_B3SUM_SUM}" = "${CALCULATED_ZKEY_SUM}" ]; then
echo "Zkey file downloaded successfully to data/light_client.zkey"
else
echo "Failed to download zkey file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey"
echo "Failed to download zkey file from "${LIGHT_CLIENT_ZKEY_DOWNLOAD_LOCATION}
exit 1
fi
}

download_dat_file() {
echo "Downloading .dat file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat ..."

curl -k https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat >"data/light_client.dat"
if [[ -z "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" ]]; then
echo "LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION environment variables are not set. Using default values."
LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION="https://dendrethstorage.blob.core.windows.net/light-client/light-client.zkey"
else
echo "Using download dat settings from environment variables"
fi

echo "Downloading .dat file from "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" ..."

curl -k "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}" >"data/light_client.dat"

CALCULATED_DAT_SUM=$(calculate_checksum data/light_client.dat)

if [ "${DAT_B3SUM_SUM}" = "${CALCULATED_DAT_SUM}" ]; then
echo ".dat file downloaded successfully to data/light_client.dat"
else
echo "Failed to download .dat file from https://dendrethstorage.blob.core.windows.net/light-client/light-client.dat"
echo "Failed to download .dat file from "${LIGHT_CLIENT_DAT_DOWNLOAD_LOCATION}""
exit 1
fi
}