Skip to content

Commit

Permalink
Create Executables For Microservices (ethereum-optimism#191)
Browse files Browse the repository at this point in the history
Creates services kickoff script
* Adds wait_for_postgres script
* Adds docker-compose.services.yml and Dockerfile.services 
* Adding insertion of dummy tx if tx could not be parsed from logs / calldata in log-handlers
  • Loading branch information
willmeister authored Jul 27, 2020
1 parent d15918e commit 3f1ebfc
Show file tree
Hide file tree
Showing 33 changed files with 1,072 additions and 446 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ COPY . /server
RUN yarn

# Copy live env config updates file to /server so that it may be updated while running.
COPY ./packages/rollup-full-node/config/env_var_updates.config /server
COPY ./packages/rollup-core/config/env_var_updates.config /server

WORKDIR /server/packages/rollup-full-node

Expand Down
16 changes: 16 additions & 0 deletions Dockerfile.services
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
FROM node:11

# Install dependencies for the whole monorepo.
WORKDIR /server
COPY . /server
RUN yarn

# Copy live env config updates file to /server so that it may be updated while running.
COPY ./packages/rollup-core/config/env_var_updates.config /server

WORKDIR /server/packages/rollup-services

# psql is required for the wait_for_postgres script.
# NOTE: update && install must run in a single layer — splitting them lets Docker
# reuse a stale package index on rebuild, which breaks the install step.
RUN apt-get update \
  && apt-get install -y --no-install-recommends postgresql-client \
  && rm -rf /var/lib/apt/lists/*

# Block until Postgres is reachable, then start the microservices.
ENTRYPOINT [ "bash", "./exec/wait_for_postgres.sh", "yarn", "run", "services" ]
4 changes: 2 additions & 2 deletions aws/dev/full-node/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ services:
environment:
- OPCODE_WHITELIST_MASK
- L1_SEQUENCER_MNEMONIC
- L2_TO_L1_MESSAGE_RECEIVER_ADDRESS
- L2_TO_L1_MESSAGE_FINALITY_DELAY_IN_BLOCKS
- L2_TO_L1_MESSAGE_RECEIVER_CONTRACT_ADDRESS
- FINALITY_DELAY_IN_BLOCKS
- L2_RPC_SERVER_HOST
- L2_RPC_SERVER_PORT
- L2_WALLET_MNEMONIC
Expand Down
4 changes: 2 additions & 2 deletions aws/synthetix/dev/full-node/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ services:
- CLEAR_DATA_KEY
- OPCODE_WHITELIST_MASK
- L1_SEQUENCER_MNEMONIC
- L2_TO_L1_MESSAGE_RECEIVER_ADDRESS
- L2_TO_L1_MESSAGE_FINALITY_DELAY_IN_BLOCKS
- L2_TO_L1_MESSAGE_RECEIVER_CONTRACT_ADDRESS
- FINALITY_DELAY_IN_BLOCKS
- L2_RPC_SERVER_HOST=0.0.0.0
- L2_RPC_SERVER_PORT=8546
- L2_RPC_SERVER_PERSISTENT_DB_PATH=/mnt/full-node/level
Expand Down
94 changes: 94 additions & 0 deletions docker-compose.services.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
version: "3"

services:

# Look at packages/rollup-core/README.md for info on configuration
microservices:
volumes:
- l1-node-data:/mnt/l1-node:rw
- l2-node-data:/mnt/l2-node:rw
build:
context: .
dockerfile: Dockerfile.services
environment:
# Logging
- DEBUG=info*,error*,warn*,debug* # The comma-separated logging patterns to match (common options are `error*`, `info*`, `warn*`, and `debug*`)
# Postgres
- POSTGRES_HOST=postgres # (Required) The host DNS entry / IP for the postgres DB
- POSTGRES_PORT=5432 # (Required) Should almost always be 5432
- POSTGRES_USER=test # (Required) The user to use to connect to the db
- POSTGRES_PASSWORD=test # (Required) The password to use to connect to the db
- POSTGRES_DATABASE=rollup # (Required) The database name to connect to (should be `rollup`)
- POSTGRES_CONNECTION_POOL_SIZE # The connection pool size for postgres (defaults to 20)
- POSTGRES_USE_SSL # Set to anything to indicate that SSL should be used in the connection
# L1 Node
- L1_NODE_INFURA_NETWORK # The Infura network for the connection to the L1 node
- L1_NODE_INFURA_PROJECT_ID # The Infura project ID for the connection to the L1 node
- L1_NODE_WEB3_URL # The URL of the L1 node
- FINALITY_DELAY_IN_BLOCKS # The number of block confirmations required to consider a transaction final on L1
# L2 Node
- L2_NODE_WEB3_URL # The URL of the L2 node
# L1 Submitters
- L1_SEQUENCER_PRIVATE_KEY # The private key to use to submit Sequencer Transaction / State Batches
# Shared Contracts
- CANONICAL_TRANSACTION_CHAIN_CONTRACT_ADDRESS # (Required) The address of the CanonicalTransactionChain contract
- STATE_COMMITMENT_CHAIN_CONTRACT_ADDRESS # (Required) The address of the StateCommitmentChain contract
# L1 Chain Data Persister (needs Postgres & L1 Node vars above)
- L1_TO_L2_TRANSACTION_QUEUE_CONTRACT_ADDRESS # (Required) The address of the L1ToL2TransactionQueue contract
- SAFETY_TRANSACTION_QUEUE_CONTRACT_ADDRESS # (Required) The address of the SafetyTransactionQueue contract
- L1_CHAIN_DATA_PERSISTER_DB_PATH # (Required) The filepath where to locate (or create) the L1 Chain Data Persister LevelDB database
- L1_EARLIEST_BLOCK # (Required) The earliest block to sync on L1 to start persisting data
# L2 Chain Data Persister (needs Postgres & L2 Node vars above)
- L2_CHAIN_DATA_PERSISTER_DB_PATH # (Required) The filepath where to locate (or create) the L2 Chain Data Persister LevelDB database
# Geth Submission Queuer (needs Postgres vars above)
- IS_SEQUENCER_STACK # (Required) Set if this is queueing Geth submissions for a sequencer (and not _just_ a verifier)
- GETH_SUBMISSION_QUEUER_PERIOD_MILLIS # The period in millis at which the GethSubmissionQueuer should attempt to queue an L2 Geth submission (defaults to 10,000)
# Queued Geth Submitter (needs Postgres & L2 Node vars above)
- QUEUED_GETH_SUBMITTER_PERIOD_MILLIS # The period in millis at which the QueuedGethSubmitter should attempt to send L2 Geth submissions (defaults to 10,000)
# Canonical Transaction Chain Batch Creator (needs Postgres vars above)
- CANONICAL_CHAIN_MIN_BATCH_SIZE # The minimum batch size to build -- if fewer than this number of transactions are ready, a batch will not be created (defaults to 10)
- CANONICAL_CHAIN_MAX_BATCH_SIZE # The maximum batch size to build -- if more than this number of transactions are ready, they will be split into multiple batches of at most this size (defaults to 100)
- CANONICAL_CHAIN_BATCH_CREATOR_PERIOD_MILLIS # The period in millis at which the CanonicalChainBatchCreator should attempt to create Canonical Chain Batches (defaults to 10,000)
# Canonical Transaction Chain Batch Submitter (needs Postgres, L1 Node, L1 Submitters, and CANONICAL_TRANSACTION_CHAIN_CONTRACT_ADDRESS vars above)
- CANONICAL_CHAIN_BATCH_SUBMITTER_PERIOD_MILLIS # The period in millis at which the CanonicalChainBatchSubmitter should attempt to submit Canonical Chain Batches (defaults to 10,000)
# State Commitment Chain Batch Creator (needs Postgres vars above)
- STATE_COMMITMENT_CHAIN_MIN_BATCH_SIZE # The minimum batch size to build -- if fewer than this number of transactions are ready, a batch will not be created (defaults to 10)
- STATE_COMMITMENT_CHAIN_MAX_BATCH_SIZE # The maximum batch size to build -- if more than this number of transactions are ready, they will be split into multiple batches of at most this size (defaults to 100)
- STATE_COMMITMENT_CHAIN_BATCH_CREATOR_PERIOD_MILLIS # The period in millis at which the StateCommitmentChainBatchCreator should attempt to create StateCommitmentChain Batches (defaults to 10,000)
# State Commitment Chain Batch Submitter (needs Postgres, L1 Node, L1 Submitters, STATE_COMMITMENT_CHAIN_CONTRACT_ADDRESS vars above)
- STATE_COMMITMENT_CHAIN_BATCH_SUBMITTER_PERIOD_MILLIS # The period in millis at which the StateCommitmentChainBatchSubmitter should attempt to submit StateCommitmentChain Batches (defaults to 10,000)
# Fraud Detector
- FRAUD_DETECTOR_PERIOD_MILLIS # The period in millis at which the FraudDetector should run (defaults to 10,000)
- REALERT_ON_UNRESOLVED_FRAUD_EVERY_N_FRAUD_DETECTOR_RUNS # The number of runs after which a detected fraud, if still present, should re-alert (via error logs) (defaults to 10)
# Which Services to run (respective vars must be configured above)
- RUN_L1_CHAIN_DATA_PERSISTER # Set to anything to run L1 Chain Data Persister
- RUN_L2_CHAIN_DATA_PERSISTER # Set to anything to run L2 Chain Data Persister
- RUN_GETH_SUBMISSION_QUEUER # Set to anything to run Geth Submission Queuer
- RUN_QUEUED_GETH_SUBMITTER # Set to anything to run Queued Geth Submitter
- RUN_CANONICAL_CHAIN_BATCH_CREATOR # Set to anything to run Canonical Chain Batch Creator
- RUN_CANONICAL_CHAIN_BATCH_SUBMITTER # Set to anything to run Canonical Chain Batch Submitter
- RUN_STATE_COMMITMENT_CHAIN_BATCH_CREATOR # Set to anything to run State Commitment Chain Batch Creator
- RUN_STATE_COMMITMENT_CHAIN_BATCH_SUBMITTER # Set to anything to run State Commitment Chain Batch Submitter
- RUN_FRAUD_DETECTOR # Set to anything to run Fraud Detector

postgres:
build:
context: ./db/
dockerfile: db.dockerfile
environment:
- POSTGRES_USER=test
- POSTGRES_PASSWORD=test
ports:
- 5432:5432

volumes:
l1-node-data:
l2-node-data:








6 changes: 3 additions & 3 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ services:
- L1_SEQUENCER_PRIVATE_KEY
- L1_SEQUENCER_MNEMONIC
- L1_EARLIEST_BLOCK
- L1_TO_L2_TRANSACTION_PASSER_ADDRESS
- L2_TO_L1_MESSAGE_RECEIVER_ADDRESS
- L2_TO_L1_MESSAGE_FINALITY_DELAY_IN_BLOCKS
- L1_TO_L2_TRANSACTION_PASSER_CONTRACT_ADDRESS
- L2_TO_L1_MESSAGE_RECEIVER_CONTRACT_ADDRESS
- FINALITY_DELAY_IN_BLOCKS
- L2_RPC_SERVER_HOST
- L2_RPC_SERVER_PORT=8545
- L2_RPC_SERVER_PERSISTENT_DB_PATH=/mnt/full-node/level
Expand Down
1 change: 1 addition & 0 deletions lerna.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
"packages/rollup-core/*",
"packages/rollup-dev-tools/*",
"packages/rollup-full-node/*",
"packages/rollup-services/*",
"packages/solc-transpiler/*"
],
"command": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,4 @@
# set the value to $DELETE$ to clear any set value it may have.

#DUMMY_ENV_VAR=somevalue

# REQUEST_LIMIT_PERIOD_MILLIS=
# MAX_TRANSACTIONS_PER_UNIT_TIME=
# MAX_NON_TRANSACTION_REQUESTS_PER_UNIT_TIME=

# CONTRACT_DEPLOYER_ADDRESS=
# COMMA_SEPARATED_TO_ADDRESS_WHITELIST=
# COMMA_SEPARATED_RATE_LIMIT_WHITELISTED_IPS=
#VAL_TO_BE_DELETED=$DELETE$
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import { getLogger, logError, ScheduledTask } from '@eth-optimism/core-utils'

/* Internal Imports */
import { DataService, GethSubmissionRecord } from '../../../types/data'
import { DataService } from '../../../types/data'

const log = getLogger('l2-batch-creator')

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ export class FraudDetector extends ScheduledTask {
public async runTask(): Promise<void> {
const verifierCandidate: VerificationCandidate = await this.dataService.getNextVerificationCandidate()
if (!verifierCandidate) {
log.debug(`No verifier candidate is available, returning...`)
return
}

Expand All @@ -53,7 +54,12 @@ export class FraudDetector extends ScheduledTask {
log.error(
`Batch #${verifierCandidate.batchNumber} state roots differ at index ${i}! L1 root: ${root.l1Root}, Geth root: ${root.gethRoot}`
)
await this.fraudProver.proveFraud(verifierCandidate.batchNumber, i)
if (!!this.fraudProver) {
await this.fraudProver.proveFraud(verifierCandidate.batchNumber, i)
} else {
// TODO: remove this fallback and make the Fraud Prover mandatory once a Fraud Prover implementation exists.
log.error(`No Fraud Prover Configured!`)
}
}
this.fraudCount++
return
Expand Down
1 change: 1 addition & 0 deletions packages/rollup-core/src/app/data/consumers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ export * from './geth-submission-queuer'
export * from './canonical-chain-batch-creator'
export * from './canonical-chain-batch-submitter'
export * from './state-commitment-chain-batch-creator'
export * from './state-commitment-chain-batch-submitter'
export * from './fraud-detector'
4 changes: 2 additions & 2 deletions packages/rollup-core/src/app/data/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
export * from './consumers/'
export * from './producers/'
export * from './consumers'
export * from './producers'

export * from './data-service'
78 changes: 42 additions & 36 deletions packages/rollup-core/src/app/data/producers/log-handlers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import {
BigNumber,
getLogger,
getTxSigner,
INVALID_ADDRESS,
logError,
remove0x,
} from '@eth-optimism/core-utils'
Expand All @@ -18,6 +19,7 @@ import { ethers } from 'ethers'
import {
Address,
L1DataService,
LogHandler,
QueueOrigin,
RollupTransaction,
} from '../../../types'
Expand All @@ -26,6 +28,13 @@ import { CHAIN_ID } from '../../constants'
const abi = new ethers.utils.AbiCoder()
const log = getLogger('log-handler')

const defaultTransaction: Partial<RollupTransaction> = {
sender: INVALID_ADDRESS,
target: INVALID_ADDRESS,
gasLimit: 1,
calldata: '0x',
}

/**
* Handles the L1ToL2TxEnqueued event by parsing a RollupTransaction
* from the event data and storing it in the DB.
Expand All @@ -52,29 +61,29 @@ export const L1ToL2TxEnqueuedLogHandler = async (

const data: string = remove0x(l.data)

let rollupTransaction: RollupTransaction
const rollupTransaction: any = { ...defaultTransaction }
try {
rollupTransaction = {
l1BlockNumber: tx.blockNumber,
l1Timestamp: tx.timestamp,
l1TxHash: l.transactionHash,
l1TxIndex: l.transactionIndex,
l1TxLogIndex: l.transactionLogIndex,
queueOrigin: QueueOrigin.L1_TO_L2_QUEUE,
indexWithinSubmission: 0,
sender: l.address,
l1MessageSender: add0x(data.substr(0, 40)),
target: add0x(data.substr(40, 40)),
// TODO: Change gasLimit to a BigNumber so it can support 256 bits
gasLimit: new BigNumber(data.substr(80, 64), 'hex').toNumber(),
calldata: add0x(data.substr(144)),
}
rollupTransaction.l1BlockNumber = tx.blockNumber
rollupTransaction.l1Timestamp = tx.timestamp
rollupTransaction.l1TxHash = l.transactionHash
rollupTransaction.l1TxIndex = l.transactionIndex
rollupTransaction.l1TxLogIndex = l.transactionLogIndex
rollupTransaction.queueOrigin = QueueOrigin.L1_TO_L2_QUEUE
rollupTransaction.indexWithinSubmission = 0
rollupTransaction.sender = l.address
rollupTransaction.l1MessageSender = add0x(data.substr(0, 40))
rollupTransaction.target = add0x(data.substr(40, 40))
// TODO: Change gasLimit to a BigNumber so it can support 256 bits
rollupTransaction.gasLimit = new BigNumber(
data.substr(80, 64),
'hex'
).toNumber()
rollupTransaction.calldata = add0x(data.substr(144))
} catch (e) {
// This is, by definition, just an ill-formatted, and therefore invalid, tx.
log.debug(
`Error parsing calldata tx from CalldataTxEnqueued event. Calldata: ${tx.data}. Error: ${e.message}. Stack: ${e.stack}.`
)
return
}

await ds.insertL1RollupTransactions(l.transactionHash, [rollupTransaction])
Expand Down Expand Up @@ -105,7 +114,7 @@ export const CalldataTxEnqueuedLogHandler = async (
`CalldataTxEnqueued event received at block ${tx.blockNumber}, tx ${l.transactionIndex}, log: ${l.transactionLogIndex}. TxHash: ${tx.hash}. Calldata: ${tx.data}`
)

let rollupTransaction: RollupTransaction
const rollupTransaction: any = { ...defaultTransaction }
try {
// Skip the 4 bytes of MethodID
const l1TxCalldata = remove0x(ethers.utils.hexDataSlice(tx.data, 4))
Expand All @@ -131,29 +140,26 @@ export const CalldataTxEnqueuedLogHandler = async (
const v = parseInt(signature.substr(130, 2), 16)
const sender: string = await getTxSigner(unsigned, r, s, v)

rollupTransaction = {
l1BlockNumber: tx.blockNumber,
l1Timestamp: tx.timestamp,
l1TxHash: l.transactionHash,
l1TxIndex: l.transactionIndex,
l1TxLogIndex: l.transactionLogIndex,
queueOrigin: QueueOrigin.SAFETY_QUEUE,
indexWithinSubmission: 0,
sender,
target,
// TODO Change nonce to a BigNumber so it can support 256 bits
nonce: nonce.toNumber(),
// TODO: Change gasLimit to a BigNumber so it can support 256 bits
gasLimit: gasLimit.toNumber(),
signature,
calldata,
}
rollupTransaction.l1BlockNumber = tx.blockNumber
rollupTransaction.l1Timestamp = tx.timestamp
rollupTransaction.l1TxHash = l.transactionHash
rollupTransaction.l1TxIndex = l.transactionIndex
rollupTransaction.l1TxLogIndex = l.transactionLogIndex
rollupTransaction.queueOrigin = QueueOrigin.SAFETY_QUEUE
rollupTransaction.indexWithinSubmission = 0
rollupTransaction.sender = sender
rollupTransaction.target = target
// TODO Change nonce to a BigNumber so it can support 256 bits
rollupTransaction.nonce = nonce.toNumber()
// TODO: Change gasLimit to a BigNumber so it can support 256 bits
rollupTransaction.gasLimit = gasLimit.toNumber()
rollupTransaction.signature = signature
rollupTransaction.calldata = calldata
} catch (e) {
// This is, by definition, just an ill-formatted, and therefore invalid, tx.
log.debug(
`Error parsing calldata tx from CalldataTxEnqueued event. Calldata: ${tx.data}. Error: ${e.message}. Stack: ${e.stack}.`
)
return
}

await ds.insertL1RollupTransactions(l.transactionHash, [rollupTransaction])
Expand Down
Loading

0 comments on commit 3f1ebfc

Please sign in to comment.