diff --git a/.changeset/config.json b/.changeset/config.json index b6606880a..3f3c1b3ef 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -5,6 +5,7 @@ "fixed": [["@ponder/core", "create-ponder", "eslint-config-ponder"]], "ignore": [ "@ponder/common", + "ponder-examples-accounts", "ponder-examples-feature-blocks", "ponder-examples-feature-factory", "ponder-examples-feature-filter", diff --git a/examples/feature-accounts/.env.example b/examples/feature-accounts/.env.example new file mode 100644 index 000000000..f7745c21c --- /dev/null +++ b/examples/feature-accounts/.env.example @@ -0,0 +1,5 @@ +# Mainnet RPC URL used for fetching blockchain data. Alchemy is recommended. +PONDER_RPC_URL_1=https://eth-mainnet.g.alchemy.com/v2/... + +# (Optional) Postgres database URL. If not provided, SQLite will be used. +DATABASE_URL= \ No newline at end of file diff --git a/examples/feature-accounts/.eslintrc.json b/examples/feature-accounts/.eslintrc.json new file mode 100644 index 000000000..359e2bbfa --- /dev/null +++ b/examples/feature-accounts/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "ponder" +} diff --git a/examples/feature-accounts/.gitignore b/examples/feature-accounts/.gitignore new file mode 100644 index 000000000..f0c7e1177 --- /dev/null +++ b/examples/feature-accounts/.gitignore @@ -0,0 +1,18 @@ +# Dependencies +/node_modules + +# Debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Misc +.DS_Store + +# Env files +.env*.local + +# Ponder +/generated/ +/.ponder/ diff --git a/examples/feature-accounts/package.json b/examples/feature-accounts/package.json new file mode 100644 index 000000000..bb7462de7 --- /dev/null +++ b/examples/feature-accounts/package.json @@ -0,0 +1,27 @@ +{ + "name": "ponder-examples-feature-accounts", + "private": true, + "type": "module", + "scripts": { + "dev": "ponder dev", + "start": "ponder start", + "codegen": "ponder codegen", + "serve": "ponder serve", + "lint": "eslint .", + "typecheck": "tsc" + }, + "dependencies": { + "@ponder/core": "workspace:*", + "hono": "^4.5.0", + "viem": "^2.21.3" + }, + "devDependencies": { + "@types/node": "^20.10.0", + "eslint": "^8.54.0", + "eslint-config-ponder": "workspace:*", + "typescript": "^5.3.2" + }, + "engines": { + "node": ">=18.14" + } +} diff --git a/examples/feature-accounts/ponder-env.d.ts b/examples/feature-accounts/ponder-env.d.ts new file mode 100644 index 000000000..e7f300973 --- /dev/null +++ b/examples/feature-accounts/ponder-env.d.ts @@ -0,0 +1,27 @@ +// This file enables type checking and editor autocomplete for this Ponder project. +// After upgrading, you may find that changes have been made to this file. +// If this happens, please commit the changes. Do not manually edit this file. +// See https://ponder.sh/docs/getting-started/installation#typescript for more information. 
+
+declare module "@/generated" {
+  import type { Virtual } from "@ponder/core";
+
+  type config = typeof import("./ponder.config.ts").default;
+  type schema = typeof import("./ponder.schema.ts");
+
+  export const ponder: Virtual.Registry<config, schema>;
+
+  export type EventNames = Virtual.EventNames<config>;
+  export type Event<name extends EventNames = EventNames> = Virtual.Event<
+    config,
+    name
+  >;
+  export type Context<name extends EventNames = EventNames> = Virtual.Context<
+    config,
+    schema,
+    name
+  >;
+  export type ApiContext = Virtual.ApiContext<schema>;
+  export type IndexingFunctionArgs<name extends EventNames = EventNames> =
+    Virtual.IndexingFunctionArgs<config, name>;
+}
diff --git a/examples/feature-accounts/ponder.config.ts b/examples/feature-accounts/ponder.config.ts
new file mode 100644
index 000000000..8e600958c
--- /dev/null
+++ b/examples/feature-accounts/ponder.config.ts
@@ -0,0 +1,22 @@
+import { createConfig } from "@ponder/core";
+import { http, createPublicClient } from "viem";
+
+const latestBlockMainnet = await createPublicClient({
+  transport: http(process.env.PONDER_RPC_URL_1),
+}).getBlock();
+
+export default createConfig({
+  networks: {
+    mainnet: {
+      chainId: 1,
+      transport: http(process.env.PONDER_RPC_URL_1),
+    },
+  },
+  accounts: {
+    BeaverBuilder: {
+      network: "mainnet",
+      startBlock: Number(latestBlockMainnet.number) - 100,
+      address: "0x95222290DD7278Aa3Ddd389Cc1E1d165CC4BAfe5",
+    },
+  },
+});
diff --git a/examples/feature-accounts/ponder.schema.ts b/examples/feature-accounts/ponder.schema.ts
new file mode 100644
index 000000000..e29cea83a
--- /dev/null
+++ b/examples/feature-accounts/ponder.schema.ts
@@ -0,0 +1,7 @@
+import { onchainTable } from "@ponder/core";
+
+export const transactionEvents = onchainTable("transaction_events", (t) => ({
+  to: t.hex().primaryKey(),
+  value: t.bigint().notNull(),
+  data: t.hex().notNull(),
+}));
diff --git a/examples/feature-accounts/src/index.ts b/examples/feature-accounts/src/index.ts
new file mode 100644
index 000000000..accb5eeb4
--- /dev/null
+++ b/examples/feature-accounts/src/index.ts
@@ -0,0 +1,22 @@
+import { ponder } from "@/generated";
+import * as schema from "../ponder.schema";
+
+ponder.on("BeaverBuilder:transaction:from", async ({ event, context }) => {
+  if (event.transaction.to === null) return;
+
+  await context.db
+    .insert(schema.transactionEvents)
+    .values({
+      to: event.transaction.to,
+      value: event.transaction.value,
+      data: event.transaction.input,
+    })
+    .onConflictDoUpdate((row) => ({
+      value: row.value + event.transaction.value,
+      data: event.transaction.input,
+    }));
+});
+
+ponder.on("BeaverBuilder:transfer:to", async ({ event }) => {
+  console.log("received", event.transfer);
+});
diff --git a/examples/feature-accounts/tsconfig.json b/examples/feature-accounts/tsconfig.json
new file mode 100644
index 000000000..592b9a939
--- /dev/null
+++ b/examples/feature-accounts/tsconfig.json
@@ -0,0 +1,26 @@
+{
+  "compilerOptions": {
+    // Type checking
+    "strict": true,
+    "noUncheckedIndexedAccess": true,
+
+    // Interop constraints
+    "verbatimModuleSyntax": false,
+    "esModuleInterop": true,
+    "isolatedModules": true,
+    "allowSyntheticDefaultImports": true,
+    "resolveJsonModule": true,
+
+    // Language and environment
+    "moduleResolution": "bundler",
+    "module": "ESNext",
+    "noEmit": true,
+    "lib": ["ES2022"],
+    "target": "ES2022",
+
+    // Skip type checking for node modules
+    "skipLibCheck": true
+  },
+  "include": ["./**/*.ts"],
+  "exclude": ["node_modules"]
+}
diff --git a/packages/core/src/_test/constants.ts b/packages/core/src/_test/constants.ts index f7ca3d2bf..6d5adfcfc 100644 --- a/packages/core/src/_test/constants.ts +++ 
b/packages/core/src/_test/constants.ts @@ -6,9 +6,3 @@ export const ACCOUNTS = [ // Named accounts export const [ALICE, BOB] = ACCOUNTS; - -// Deployed contract addresses. -export const CONTRACTS = { - erc20Address: "0x5fbdb2315678afecb367f032d93f642f64180aa3", - factoryAddress: "0xe7f1725e7734ce288f8367e1bb143e90bb3f0512", -} as const; diff --git a/packages/core/src/_test/e2e/erc20/erc20.test.ts b/packages/core/src/_test/e2e/erc20/erc20.test.ts index e1c1e77b2..20374d627 100644 --- a/packages/core/src/_test/e2e/erc20/erc20.test.ts +++ b/packages/core/src/_test/e2e/erc20/erc20.test.ts @@ -1,11 +1,11 @@ import path from "node:path"; -import { ALICE, BOB } from "@/_test/constants.js"; +import { ALICE } from "@/_test/constants.js"; import { setupAnvil, setupCommon, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { simulate } from "@/_test/simulate.js"; +import { deployErc20, mintErc20 } from "@/_test/simulate.js"; import { getFreePort, postGraphql, @@ -13,9 +13,8 @@ import { } from "@/_test/utils.js"; import { serve } from "@/bin/commands/serve.js"; import { start } from "@/bin/commands/start.js"; -import { range } from "@/utils/range.js"; import { rimrafSync } from "rimraf"; -import { zeroAddress } from "viem"; +import { parseEther, zeroAddress } from "viem"; import { beforeEach, describe, expect, test } from "vitest"; const rootDir = path.join(".", "src", "_test", "e2e", "erc20"); @@ -35,7 +34,7 @@ const cliOptions = { logFormat: "pretty", }; -test("erc20", async (context) => { +test("erc20", async () => { const port = await getFreePort(); const cleanup = await start({ @@ -46,12 +45,16 @@ test("erc20", async (context) => { }, }); - await simulate({ - erc20Address: context.erc20.address, - factoryAddress: context.factory.address, + const { address } = await deployErc20({ sender: ALICE }); + + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, }); - await waitForIndexedBlock(port, "mainnet", 8); + await waitForIndexedBlock(port, "mainnet", 2); const response = await postGraphql( port, @@ -67,19 +70,16 @@ test("erc20", async (context) => { expect(response.status).toBe(200); const body = (await response.json()) as any; + expect(body.errors).toBe(undefined); const accounts = body.data.accounts.items; expect(accounts[0]).toMatchObject({ address: zeroAddress, - balance: (-2 * 10 ** 18).toString(), + balance: (-1 * 10 ** 18).toString(), }); expect(accounts[1]).toMatchObject({ - address: BOB.toLowerCase(), - balance: (2 * 10 ** 18).toString(), - }); - expect(accounts[2]).toMatchObject({ address: ALICE.toLowerCase(), - balance: "0", + balance: (10 ** 18).toString(), }); await cleanup(); @@ -89,7 +89,7 @@ const isPglite = !!process.env.DATABASE_URL; // Fix this once it's easier to have per-command kill functions in Ponder.ts. 
describe.skipIf(isPglite)("postgres database", () => { - test.todo("ponder serve", async (context) => { + test.todo("ponder serve", async () => { const startPort = await getFreePort(); const cleanupStart = await start({ @@ -100,13 +100,14 @@ describe.skipIf(isPglite)("postgres database", () => { }, }); - for (const _ in range(0, 3)) { - await simulate({ - erc20Address: context.erc20.address, - factoryAddress: context.factory.address, - }); - } + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); const servePort = await getFreePort(); const cleanupServe = await serve({ @@ -137,15 +138,11 @@ describe.skipIf(isPglite)("postgres database", () => { expect(accounts).toHaveLength(3); expect(accounts[0]).toMatchObject({ address: zeroAddress, - balance: (-4 * 10 ** 18).toString(), + balance: (-1 * 10 ** 18).toString(), }); expect(accounts[1]).toMatchObject({ - address: BOB.toLowerCase(), - balance: (4 * 10 ** 18).toString(), - }); - expect(accounts[2]).toMatchObject({ address: ALICE.toLowerCase(), - balance: "0", + balance: (10 ** 18).toString(), }); await cleanupServe(); diff --git a/packages/core/src/_test/e2e/erc20/ponder.config.ts b/packages/core/src/_test/e2e/erc20/ponder.config.ts index ce217d77e..dc014fde4 100644 --- a/packages/core/src/_test/e2e/erc20/ponder.config.ts +++ b/packages/core/src/_test/e2e/erc20/ponder.config.ts @@ -1,7 +1,5 @@ import { http } from "viem"; - import { createConfig } from "../../../config/config.js"; -import { CONTRACTS } from "../../constants.js"; import { erc20ABI } from "../../generated.js"; const poolId = Number(process.env.VITEST_POOL_ID ?? 1); @@ -29,11 +27,7 @@ export default createConfig({ Erc20: { network: "mainnet", abi: erc20ABI, - address: CONTRACTS.erc20Address, - filter: { - event: - "Transfer(address indexed from, address indexed to, uint256 amount)", - }, + address: "0x5fbdb2315678afecb367f032d93f642f64180aa3", }, }, }); diff --git a/packages/core/src/_test/e2e/factory/factory.test.ts b/packages/core/src/_test/e2e/factory/factory.test.ts index 2a866c56a..e87c75a93 100644 --- a/packages/core/src/_test/e2e/factory/factory.test.ts +++ b/packages/core/src/_test/e2e/factory/factory.test.ts @@ -5,7 +5,9 @@ import { setupCommon, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { simulatePairSwap } from "@/_test/simulate.js"; +import { deployFactory } from "@/_test/simulate.js"; +import { createPair } from "@/_test/simulate.js"; +import { swapPair } from "@/_test/simulate.js"; import { getFreePort, postGraphql, @@ -32,7 +34,7 @@ const cliOptions = { logFormat: "pretty", }; -test("factory", async (context) => { +test("factory", async () => { const port = await getFreePort(); const cleanup = await start({ @@ -43,7 +45,20 @@ test("factory", async (context) => { }, }); - await waitForIndexedBlock(port, "mainnet", 5); + const { address } = await deployFactory({ sender: ALICE }); + const { result: pair } = await createPair({ + factory: address, + sender: ALICE, + }); + await swapPair({ + pair, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + sender: ALICE, + }); + + await waitForIndexedBlock(port, "mainnet", 3); let response = await postGraphql( port, @@ -69,12 +84,18 @@ test("factory", async (context) => { id: expect.any(String), from: ALICE.toLowerCase(), to: ALICE.toLowerCase(), - pair: context.factory.pair.toLowerCase(), + pair, }); - await simulatePairSwap(context.factory.pair); + await swapPair({ + pair, + amount0Out: 1n, + amount1Out: 1n, 
+ to: ALICE, + sender: ALICE, + }); - await waitForIndexedBlock(port, "mainnet", 6); + await waitForIndexedBlock(port, "mainnet", 4); response = await postGraphql( port, diff --git a/packages/core/src/_test/e2e/factory/ponder.config.ts b/packages/core/src/_test/e2e/factory/ponder.config.ts index 875fc0392..d8e8f5708 100644 --- a/packages/core/src/_test/e2e/factory/ponder.config.ts +++ b/packages/core/src/_test/e2e/factory/ponder.config.ts @@ -1,7 +1,5 @@ import { http, getAbiItem } from "viem"; - import { createConfig } from "../../../config/config.js"; -import { CONTRACTS } from "../../constants.js"; import { factoryABI, pairABI } from "../../generated.js"; const poolId = Number(process.env.VITEST_POOL_ID ?? 1); @@ -30,7 +28,7 @@ export default createConfig({ network: "mainnet", abi: pairABI, factory: { - address: CONTRACTS.factoryAddress, + address: "0x5fbdb2315678afecb367f032d93f642f64180aa3", event: getAbiItem({ abi: factoryABI, name: "PairCreated" }), parameter: "pair", }, diff --git a/packages/core/src/_test/setup.ts b/packages/core/src/_test/setup.ts index 07ce0a74b..bfb7596f7 100644 --- a/packages/core/src/_test/setup.ts +++ b/packages/core/src/_test/setup.ts @@ -4,9 +4,7 @@ import { createLogger } from "@/common/logger.js"; import { MetricsService } from "@/common/metrics.js"; import { buildOptions } from "@/common/options.js"; import { createTelemetry } from "@/common/telemetry.js"; -import type { Config } from "@/config/config.js"; import type { DatabaseConfig } from "@/config/database.js"; -import type { Network } from "@/config/networks.js"; import { type Database, createDatabase } from "@/database/index.js"; import type { Schema } from "@/drizzle/index.js"; import type { IndexingStore } from "@/indexing-store/index.js"; @@ -16,37 +14,16 @@ import { } from "@/indexing-store/metadata.js"; import { createRealtimeIndexingStore } from "@/indexing-store/realtime.js"; import { type SyncStore, createSyncStore } from "@/sync-store/index.js"; -import type { BlockSource, ContractSource, LogFactory } from "@/sync/source.js"; import { createPglite } from "@/utils/pglite.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; import type { PGlite } from "@electric-sql/pglite"; import pg from "pg"; -import type { Address } from "viem"; import { type TestContext, afterAll } from "vitest"; -import { deploy, simulate } from "./simulate.js"; -import { - getConfig, - getNetworkAndSources, - poolId, - testClient, -} from "./utils.js"; +import { poolId, testClient } from "./utils.js"; declare module "vitest" { export interface TestContext { common: Common; databaseConfig: DatabaseConfig; - sources: [ - ContractSource<"log", undefined>, - ContractSource<"log", LogFactory>, - ContractSource<"trace", LogFactory>, - ContractSource<"trace", undefined>, - BlockSource, - ]; - networks: [Network]; - requestQueues: [RequestQueue]; - config: Config; - erc20: { address: Address }; - factory: { address: Address; pair: Address }; } } @@ -244,42 +221,17 @@ export async function setupDatabaseServices( } /** - * Sets up an isolated Ethereum client on the test context, with the appropriate Erc20 + Factory state. + * Sets up an isolated Ethereum client. * + * @example * ```ts * // Add this to any test suite that uses the Ethereum client. 
- * beforeEach((context) => setupAnvil(context)) + * beforeEach(setupAnvil) * ``` */ -export async function setupAnvil(context: TestContext) { +export async function setupAnvil() { const emptySnapshotId = await testClient.snapshot(); - // Chain state setup shared across all tests. - const addresses = await deploy(); - const pair = await simulate(addresses); - await testClient.mine({ blocks: 1 }); - - context.config = getConfig(addresses); - - const { networks, sources, requestQueues } = await getNetworkAndSources( - addresses, - context.common, - ); - context.networks = networks as [Network]; - context.requestQueues = requestQueues as [RequestQueue]; - context.sources = sources as [ - ContractSource<"log", undefined>, - ContractSource<"log", LogFactory>, - ContractSource<"trace", LogFactory>, - ContractSource<"trace", undefined>, - BlockSource, - ]; - context.erc20 = { address: addresses.erc20Address }; - context.factory = { - address: addresses.factoryAddress, - pair: pair.toLowerCase() as Address, - }; - return async () => { await testClient.revert({ id: emptySnapshotId }); }; diff --git a/packages/core/src/_test/simulate.ts b/packages/core/src/_test/simulate.ts index cc05fe033..08cf37a89 100644 --- a/packages/core/src/_test/simulate.ts +++ b/packages/core/src/_test/simulate.ts @@ -1,115 +1,178 @@ -import { type Address, type Hex, parseEther } from "viem"; - -import { ALICE, BOB } from "./constants.js"; +import { toLowerCase } from "@/utils/lowercase.js"; +import { http, type Address, type Hex, createWalletClient } from "viem"; import Erc20Bytecode from "./contracts/out/ERC20.sol/ERC20.json"; import FactoryBytecode from "./contracts/out/Factory.sol/Factory.json"; import { erc20ABI, factoryABI, pairABI } from "./generated.js"; -import { publicClient, testClient, walletClient } from "./utils.js"; +import { anvil, publicClient, testClient } from "./utils.js"; + +/** Deploy Erc20 contract and mine block. */ +export const deployErc20 = async (params: { sender: Address }) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); -/** - * Deploy Two ERC20 tokens and a Factory contract. All happens in one block. - */ -export const deploy = async () => { - const deployHashErc20 = await walletClient.deployContract({ + const hash = await walletClient.deployContract({ abi: erc20ABI, bytecode: Erc20Bytecode.bytecode.object as Hex, args: ["name", "symbol", 18], }); - const deployHashFactory = await walletClient.deployContract({ + await testClient.mine({ blocks: 1 }); + const { contractAddress } = await publicClient.waitForTransactionReceipt({ + hash, + }); + + return { address: contractAddress!, hash }; +}; + +/** Deploy Factory contract and mine block. 
*/ +export const deployFactory = async (params: { sender: Address }) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); + + const hash = await walletClient.deployContract({ abi: factoryABI, bytecode: FactoryBytecode.bytecode.object as Hex, }); await testClient.mine({ blocks: 1 }); + const { contractAddress } = await publicClient.waitForTransactionReceipt({ + hash, + }); - const { contractAddress: erc20Address } = - await publicClient.waitForTransactionReceipt({ - hash: deployHashErc20, - }); - const { contractAddress: factoryAddress } = - await publicClient.waitForTransactionReceipt({ - hash: deployHashFactory, - }); - return { - erc20Address: erc20Address!, - factoryAddress: factoryAddress!, - }; + return { address: contractAddress!, hash }; }; -/** - * Simulate network activity - * - * 1) Mint one tokens to Alice - * 2) Transfer one token from Alice to Bob - * 3) Create a pair - * 4) Swap on created pair - * - * Blocks are created after 2, 3, and 4. - * - * @returns The pair address - */ -export const simulate = async ( - addresses: Awaited>, -): Promise
=> { - await simulateErc20(addresses.erc20Address); - const pairAddress = await simulateFactoryDeploy(addresses.factoryAddress); - await simulatePairSwap(pairAddress); - - return pairAddress; -}; +/** Mint Erc20 tokens and mine block. */ +export const mintErc20 = async (params: { + erc20: Address; + to: Address; + amount: bigint; + sender: Address; +}) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); -export const simulateErc20 = async (erc20Address: Address) => { - // Mint 1 token to ALICE - const mintHash = await walletClient.writeContract({ + const hash = await walletClient.writeContract({ abi: erc20ABI, functionName: "mint", - address: erc20Address, - args: [ALICE, parseEther("1")], + address: params.erc20, + args: [params.to, params.amount], + }); + + await testClient.mine({ blocks: 1 }); + await publicClient.waitForTransactionReceipt({ hash }); + + return { hash }; +}; + +/** Transfer Erc20 tokens and mine block. */ +export const transferErc20 = async (params: { + erc20: Address; + to: Address; + amount: bigint; + sender: Address; +}) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, }); - // Transfer 1 token from ALICE to BOB - const transferHash = await walletClient.writeContract({ + const hash = await walletClient.writeContract({ abi: erc20ABI, functionName: "transfer", - address: erc20Address, - args: [BOB, parseEther("1")], + address: params.erc20, + args: [params.to, params.amount], }); await testClient.mine({ blocks: 1 }); + await publicClient.waitForTransactionReceipt({ hash }); - await publicClient.waitForTransactionReceipt({ hash: mintHash }); - await publicClient.waitForTransactionReceipt({ hash: transferHash }); + return { hash }; }; -export const simulateFactoryDeploy = async ( - factoryAddress: Address, -): Promise
=> { +/** Create pair and mine block. */ +export const createPair = async (params: { + factory: Address; + sender: Address; +}) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); + const { result, request } = await publicClient.simulateContract({ abi: factoryABI, functionName: "createPair", - address: factoryAddress, + address: params.factory, }); - const createPairHash = await walletClient.writeContract(request); - await testClient.mine({ blocks: 1 }); + const hash = await walletClient.writeContract(request); + await testClient.mine({ blocks: 1 }); await publicClient.waitForTransactionReceipt({ - hash: createPairHash, + hash, }); - return result; + return { result: toLowerCase(result), hash }; }; -export const simulatePairSwap = async (pairAddress: Address) => { - const swapHash = await walletClient.writeContract({ +/** Swap tokens in pair and mine block. */ +export const swapPair = async (params: { + pair: Address; + amount0Out: bigint; + amount1Out: bigint; + to: Address; + sender: Address; +}) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); + + const hash = await walletClient.writeContract({ abi: pairABI, functionName: "swap", - address: pairAddress, - args: [1n, 2n, ALICE], + address: params.pair, + args: [params.amount0Out, params.amount1Out, params.to], + }); + + await testClient.mine({ blocks: 1 }); + await publicClient.waitForTransactionReceipt({ hash }); + + return { hash }; +}; + +/** Transfer native tokens and mine block. */ +export const transferEth = async (params: { + to: Address; + amount: bigint; + sender: Address; +}) => { + const walletClient = createWalletClient({ + chain: anvil, + transport: http(), + account: params.sender, + }); + + const hash = await walletClient.sendTransaction({ + to: params.to, + value: params.amount, }); await testClient.mine({ blocks: 1 }); + await publicClient.waitForTransactionReceipt({ hash }); - await publicClient.waitForTransactionReceipt({ hash: swapHash }); + return { hash }; }; diff --git a/packages/core/src/_test/utils.ts b/packages/core/src/_test/utils.ts index 31d66cb61..31de773e2 100644 --- a/packages/core/src/_test/utils.ts +++ b/packages/core/src/_test/utils.ts @@ -1,52 +1,11 @@ import { type AddressInfo, createServer } from "node:net"; -import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; -import type { Common } from "@/common/common.js"; import { createConfig } from "@/config/config.js"; -import type { RawEvent } from "@/sync/events.js"; +import type { Network } from "@/config/networks.js"; import type { Status } from "@/sync/index.js"; -import type { Source } from "@/sync/source.js"; -import type { - SyncBlock, - SyncCallTrace, - SyncCreateTrace, - SyncLog, - SyncTransaction, - SyncTransactionReceipt, -} from "@/types/sync.js"; -import { - encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, -} from "@/utils/checkpoint.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; -import { - type Chain, - type Hash, - type Hex, - encodeFunctionData, - encodeFunctionResult, - formatTransactionReceipt, - hexToBigInt, - hexToNumber, - parseEther, -} from "viem"; -import { - http, - checksumAddress, - createPublicClient, - createTestClient, - createWalletClient, - formatBlock, - formatLog, - formatTransaction, - getAbiItem, - slice, - toHex, -} from "viem"; +import type { Address, Chain } from "viem"; +import { http, createPublicClient, createTestClient, 
getAbiItem } from "viem"; import { mainnet } from "viem/chains"; -import { ALICE, BOB } from "./constants.js"; import { erc20ABI, factoryABI, pairABI } from "./generated.js"; -import type { deploy } from "./simulate.js"; // Anvil test setup adapted from @viem/anvil `example-vitest` repository. // https://github.com/wagmi-dev/anvil.js/tree/main/examples/example-vitest @@ -80,18 +39,15 @@ export const publicClient = createPublicClient({ transport: http(), }); -export const walletClient = createWalletClient({ - chain: anvil, - transport: http(), - account: ALICE, -}); +export const getBlockNumber = async () => + publicClient.getBlockNumber().then(Number); -/** - * Returns the config for the local anvil testing suite. - * The suite contains an erc20 and mock factory + pair event sources. - */ -export const getConfig = (addresses: Awaited>) => - createConfig({ +export const getErc20ConfigAndIndexingFunctions = (params: { + address: Address; + includeCallTraces?: boolean; + includeTransactionReceipts?: boolean; +}) => { + const config = createConfig({ networks: { mainnet: { chainId: 1, @@ -102,554 +58,132 @@ export const getConfig = (addresses: Awaited>) => Erc20: { abi: erc20ABI, network: "mainnet", - address: addresses.erc20Address, - filter: { - event: [ - "Transfer(address indexed from, address indexed to, uint256 amount)", - "Approval", - ], + address: params.address, + includeCallTraces: params.includeCallTraces, + includeTransactionReceipts: params.includeTransactionReceipts, + }, + }, + }); + + const rawIndexingFunctions = params.includeCallTraces + ? [ + { name: "Erc20.transfer()", fn: () => {} }, + { + name: "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)", + fn: () => {}, }, + ] + : [ + { + name: "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)", + fn: () => {}, + }, + ]; + + return { config, rawIndexingFunctions }; +}; + +export const getPairWithFactoryConfigAndIndexingFunctions = (params: { + address: Address; + includeCallTraces?: boolean; + includeTransactionReceipts?: boolean; +}) => { + const config = createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(`http://127.0.0.1:8545/${poolId}`), }, + }, + contracts: { Pair: { abi: pairABI, network: "mainnet", factory: { - address: addresses.factoryAddress, + address: params.address, event: getAbiItem({ abi: factoryABI, name: "PairCreated" }), parameter: "pair", }, - includeCallTraces: true, - filter: { - event: ["Swap"], - }, - }, - Factory: { - abi: factoryABI, - network: "mainnet", - address: addresses.factoryAddress, - includeCallTraces: true, - }, - }, - blocks: { - OddBlocks: { - startBlock: 1, - interval: 2, - network: "mainnet", + includeCallTraces: params.includeCallTraces, + includeTransactionReceipts: params.includeTransactionReceipts, }, }, }); -/** - * Returns a network representing the local anvil chain. - * Set `finalityBlockCount` to 4 because `deploy()` + `simulate()` is 4 blocks. 
- */ -export const getNetworkAndSources = async ( - addresses: Awaited>, - common: Common, -) => { - const config = getConfig(addresses); - const { networks, sources } = await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions: [ - { - name: "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)", - fn: () => {}, - }, - { name: "Pair:Swap", fn: () => {} }, - { name: "Pair.swap()", fn: () => {} }, - { name: "OddBlocks:block", fn: () => {} }, - { name: "Factory.createPair()", fn: () => {} }, - ], - options: common.options, - }); - const mainnet = { ...networks[0], finalityBlockCount: 4 }; - - const requestQueue = createRequestQueue({ - network: networks[0]!, - common, - }); + const rawIndexingFunctions = params.includeCallTraces + ? [ + { name: "Pair.swap()", fn: () => {} }, + { name: "Pair:Swap", fn: () => {} }, + ] + : [{ name: "Pair:Swap", fn: () => {} }]; - return { - networks: [mainnet], - sources, - requestQueues: [requestQueue], - }; + return { config, rawIndexingFunctions }; }; -/** - * Returns the logs, block, traces, and transaction data for blocks 1, 2, 3, 4, 5. - * Block 2 has two contract creations. - * Block 2 has two erc20 transfer events. - * Block 3 has a pair creation event. - * Block 4 has a swap event from the newly created pair. - * Block 5 is empty. - */ -export const getRawRPCData = async () => { - const latestBlock = await publicClient.getBlockNumber(); - const logs = await publicClient.request({ - method: "eth_getLogs", - params: [ - { - fromBlock: toHex(latestBlock - 3n), +export const getBlocksConfigAndIndexingFunctions = (params: { + interval: number; +}) => { + const config = createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(`http://127.0.0.1:8545/${poolId}`), + }, + }, + blocks: { + Blocks: { + network: "mainnet", + interval: params.interval, }, - ], + }, }); - // Manually add the child address log - logs.push( - ...(await publicClient.request({ - method: "eth_getLogs", - params: [ - { - address: slice(logs[2]!.topics[1]!, 12), - fromBlock: toHex(latestBlock - 3n), - }, - ], - })), - ); + const rawIndexingFunctions = [{ name: "Blocks:block", fn: () => {} }]; - // Dedupe any repeated blocks and txs - const blockNumbers: Set = new Set(); - const txHashes: Set = new Set(); - for (const log of logs) { - if (log.blockNumber) blockNumbers.add(log.blockNumber); - if (log.transactionHash) txHashes.add(log.transactionHash); - } - const blocks = await Promise.all( - [1, 2, 3, 4, 5].map( - (bn) => - publicClient.request({ - method: "eth_getBlockByNumber", - params: [toHex(bn), true], - }) as Promise, - ), - ); - const transactionReceipts = await Promise.all( - [...txHashes].map((tx) => - publicClient.request({ - method: "eth_getTransactionReceipt", - params: [tx], - }), - ), - ); - - return { - block1: { - logs: [], - block: blocks[0], - transactions: [], - transactionReceipts: [], - callTraces: [ - { - action: { - from: ALICE, - gas: "0x0", - init: "0x0", - value: "0x0", - }, - blockHash: blocks[0]!.hash, - blockNumber: blocks[0]!.number, - result: { - address: "0x0", - code: "0x0", - gasUsed: "0x0", - }, - subtraces: 0, - traceAddress: [0], - transactionHash: blocks[0]!.transactions[0]!.hash, - transactionPosition: hexToNumber( - blocks[0]!.transactions[0]!.transactionIndex, - ), - type: "create", - }, - { - action: { - from: ALICE, - gas: "0x0", - init: "0x0", - value: "0x0", - }, - blockHash: blocks[0]!.hash, - blockNumber: blocks[0]!.number, - result: { - address: "0x0", - code: "0x0", - gasUsed: "0x0", - }, - 
subtraces: 0, - traceAddress: [0], - transactionHash: blocks[0]!.transactions[1]!.hash, - transactionPosition: hexToNumber( - blocks[0]!.transactions[1]!.transactionIndex, - ), - type: "create", - }, - ], - }, - block2: { - logs: [logs[0]!, logs[1]!], - block: blocks[1]!, - transactions: blocks[1]!.transactions, - transactionReceipts: transactionReceipts.filter( - (tr) => tr?.blockNumber === blocks[1]?.number, - ), - callTraces: [ - { - action: { - callType: "call", - from: ALICE, - gas: "0x0", - input: encodeFunctionData({ - abi: erc20ABI, - functionName: "mint", - args: [ALICE, parseEther("1")], - }), - to: logs[0]!.address, - value: "0x0", - }, - blockHash: blocks[1]!.hash, - blockNumber: blocks[1]!.number, - result: { - gasUsed: "0x0", - output: encodeFunctionResult({ - abi: erc20ABI, - functionName: "mint", - }), - }, - subtraces: 0, - traceAddress: [0], - transactionHash: blocks[1]!.transactions[0]!.hash, - transactionPosition: hexToNumber( - blocks[1]!.transactions[0]!.transactionIndex, - ), - type: "call", - }, - { - action: { - callType: "call", - from: ALICE, - gas: "0x0", - input: encodeFunctionData({ - abi: erc20ABI, - functionName: "mint", - args: [BOB, parseEther("1")], - }), - to: logs[1]!.address, - value: "0x0", - }, - blockHash: blocks[1]!.hash, - blockNumber: blocks[1]!.number, - result: { - gasUsed: "0x0", - output: encodeFunctionResult({ - abi: erc20ABI, - functionName: "mint", - }), - }, - subtraces: 0, - traceAddress: [0], - transactionHash: blocks[1]!.transactions[1]!.hash, - transactionPosition: hexToNumber( - blocks[1]!.transactions[1]!.transactionIndex, - ), - type: "call", - }, - ], - }, - block3: { - logs: [logs[2]], - block: blocks[2], - transactions: blocks[2]!.transactions, - transactionReceipts: transactionReceipts.filter( - (tr) => tr?.blockNumber === blocks[2]?.number, - ), - callTraces: [ - { - action: { - callType: "call", - from: ALICE, - gas: "0x0", - input: encodeFunctionData({ - abi: factoryABI, - functionName: "createPair", - }), - to: logs[2]!.address, - value: "0x0", - }, - blockHash: blocks[2]!.hash, - blockNumber: blocks[2]!.number, - result: { - gasUsed: "0x0", - output: encodeFunctionResult({ - abi: factoryABI, - functionName: "createPair", - result: logs[3]!.address, - }), - }, - subtraces: 0, - traceAddress: [0], - transactionHash: blocks[2]!.transactions[0]!.hash, - transactionPosition: hexToNumber( - blocks[2]!.transactions[0]!.transactionIndex, - ), - type: "call", - }, - ], - }, - block4: { - logs: [logs[3]], - block: blocks[3], - transactions: blocks[3]!.transactions, - transactionReceipts: transactionReceipts.filter( - (tr) => tr?.blockNumber === blocks[3]?.number, - ), - callTraces: [ - { - action: { - callType: "call", - from: ALICE, - gas: "0x0", - input: encodeFunctionData({ - abi: pairABI, - functionName: "swap", - args: [1n, 2n, ALICE], - }), - to: logs[3]!.address, - value: "0x0", - }, - blockHash: blocks[3]!.hash, - blockNumber: blocks[3]!.number, - result: { - gasUsed: "0x0", - output: encodeFunctionResult({ - abi: pairABI, - functionName: "swap", - }), - }, - subtraces: 0, - traceAddress: [0], - transactionHash: blocks[3]!.transactions[0]!.hash, - transactionPosition: hexToNumber( - blocks[3]!.transactions[0]!.transactionIndex, - ), - type: "call", - }, - ], - }, - block5: { - logs: [], - block: blocks[4]!, - transactions: [], - transactionReceipts: [], - callTraces: [], - }, - } as unknown as { - block1: { - logs: []; - block: SyncBlock; - transactions: []; - transactionReceipts: []; - callTraces: [SyncCreateTrace, 
SyncCreateTrace]; - }; - block2: { - logs: [SyncLog, SyncLog]; - block: SyncBlock; - transactions: [SyncTransaction, SyncTransaction]; - transactionReceipts: [SyncTransactionReceipt, SyncTransactionReceipt]; - callTraces: [SyncCallTrace, SyncCallTrace]; - }; - block3: { - logs: [SyncLog]; - block: SyncBlock; - transactions: [SyncTransaction]; - transactionReceipts: [SyncTransactionReceipt]; - callTraces: [SyncCallTrace]; - }; - block4: { - logs: [SyncLog]; - block: SyncBlock; - transactions: [SyncTransaction]; - transactionReceipts: [SyncTransactionReceipt]; - callTraces: [SyncCallTrace]; - }; - block5: { - logs: []; - block: SyncBlock; - transactions: []; - transactionReceipts: []; - callTraces: []; - }; - }; + return { config, rawIndexingFunctions }; }; -/** - * Mock function for `getEvents` that specifically returns the event data for the log and factory sources. - */ -export const getEventsLog = async (sources: Source[]): Promise => { - const rpcData = await getRawRPCData(); - - return [ - { - log: rpcData.block2.logs[0], - block: rpcData.block2.block, - transaction: rpcData.block2.transactions[0]!, - transactionReceipt: rpcData.block2.transactionReceipts[0]!, - }, - { - log: rpcData.block2.logs[1], - block: rpcData.block2.block, - transaction: rpcData.block2.transactions[1]!, - transactionReceipt: rpcData.block2.transactionReceipts[1]!, - }, - { - log: rpcData.block4.logs[0], - block: rpcData.block4.block, - transaction: rpcData.block4.transactions[0]!, - transactionReceipt: rpcData.block4.transactionReceipts[0]!, - }, - ] - .map((e) => ({ - log: formatLog(e.log), - block: formatBlock(e.block), - transaction: formatTransaction(e.transaction), - transactionReceipt: formatTransactionReceipt(e.transactionReceipt), - })) - .map(({ log, block, transaction, transactionReceipt }, i) => ({ - sourceIndex: i === 0 || i === 1 ? 0 : 1, - chainId: sources[0]!.filter.chainId, - checkpoint: encodeCheckpoint({ - blockTimestamp: Number(block.timestamp), - chainId: BigInt(sources[0]!.filter.chainId), - blockNumber: block.number!, - transactionIndex: BigInt(transaction.transactionIndex!), - eventType: 5, - eventIndex: BigInt(log.logIndex!), - }), - log: { - ...log, - id: `${log.blockHash}-${toHex(log.logIndex!)}`, - address: checksumAddress(log.address), - }, - block: { ...block, miner: checksumAddress(block.miner) }, - transaction: { - ...transaction, - from: checksumAddress(transaction.from), - to: transaction.to ? checksumAddress(transaction.to) : transaction.to, +export const getAccountsConfigAndIndexingFunctions = (params: { + address: Address; +}) => { + const config = createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(`http://127.0.0.1:8545/${poolId}`), }, - transactionReceipt: { - ...transactionReceipt, - from: checksumAddress(transactionReceipt.from), - to: transactionReceipt.to - ? checksumAddress(transactionReceipt.to) - : transactionReceipt.to, - logs: transactionReceipt.logs.map((l) => ({ - ...l, - id: `${l.blockHash}-${toHex(l.logIndex!)}`, - })), + }, + accounts: { + Accounts: { + network: "mainnet", + address: params.address, }, - })) as RawEvent[]; -}; - -/** - * Mock function for `getEvents` that specifically returns the event data for the block sources. 
- */ -export const getEventsBlock = async ( - sources: Source[], -): Promise => { - const rpcData = await getRawRPCData(); - - return [ - { - block: rpcData.block3.block, }, - ] - .map((e) => ({ - block: formatBlock(e.block), - })) - .map(({ block }) => ({ - sourceIndex: 4, - chainId: sources[4]!.filter.chainId, - checkpoint: encodeCheckpoint({ - blockTimestamp: Number(block.timestamp), - chainId: BigInt(sources[0]!.filter.chainId), - blockNumber: block.number!, - transactionIndex: maxCheckpoint.transactionIndex, - eventType: 5, - eventIndex: zeroCheckpoint.eventIndex, - }), + }); - block: { ...block, miner: checksumAddress(block.miner) }, - })) as RawEvent[]; -}; + const rawIndexingFunctions = [ + { name: "Accounts:transaction:from", fn: () => {} }, + { name: "Accounts:transaction:to", fn: () => {} }, + { name: "Accounts:transfer:from", fn: () => {} }, + { name: "Accounts:transfer:to", fn: () => {} }, + ]; -/** - * Mock function for `getEvents` that specifically returns the event data for the trace sources. - */ -export const getEventsTrace = async ( - sources: Source[], -): Promise => { - const rpcData = await getRawRPCData(); + return { config, rawIndexingFunctions }; +}; - return [ - { - trace: rpcData.block3.callTraces[0], - block: rpcData.block3.block, - transaction: rpcData.block3.transactions[0]!, - transactionReceipt: rpcData.block3.transactionReceipts[0]!, - }, - ] - .map((e) => ({ - trace: e.trace, - block: formatBlock(e.block), - transaction: formatTransaction(e.transaction), - transactionReceipt: formatTransactionReceipt(e.transactionReceipt), - })) - .map(({ trace, block, transaction, transactionReceipt }) => ({ - sourceIndex: 3, - chainId: sources[3]!.filter.chainId, - checkpoint: encodeCheckpoint({ - blockTimestamp: Number(block.timestamp), - chainId: BigInt(sources[0]!.filter.chainId), - blockNumber: block.number!, - transactionIndex: BigInt(transaction.transactionIndex!), - eventType: 7, - eventIndex: 0n, - }), - trace: { - id: `${trace.transactionHash}-${JSON.stringify(trace.traceAddress)}`, - from: checksumAddress(trace.action.from), - to: checksumAddress(trace.action.to), - gas: hexToBigInt(trace.action.gas), - value: hexToBigInt(trace.action.value), - input: trace.action.input, - output: trace.result!.output, - gasUsed: hexToBigInt(trace.result!.gasUsed), - subtraces: trace.subtraces, - traceAddress: trace.traceAddress, - blockHash: trace.blockHash, - blockNumber: hexToBigInt(trace.blockNumber), - transactionHash: trace.transactionHash, - transactionIndex: trace.transactionPosition, - callType: trace.action.callType, - }, - block: { ...block, miner: checksumAddress(block.miner) }, - transaction: { - ...transaction, - from: checksumAddress(transaction.from), - to: transaction.to ? checksumAddress(transaction.to) : transaction.to, - }, - transactionReceipt: { - ...transactionReceipt, - from: checksumAddress(transactionReceipt.from), - to: transactionReceipt.to - ? checksumAddress(transactionReceipt.to) - : transactionReceipt.to, - logs: transactionReceipt.logs.map((l) => ({ - ...l, - id: `${l.blockHash}-${toHex(l.logIndex!)}`, - })), - }, - })) as RawEvent[]; +export const getNetwork = (params?: { + finalityBlockCount?: number; +}) => { + return { + name: "mainnet", + chainId: 1, + chain: anvil, + transport: http(`http://127.0.0.1:8545/${poolId}`)({ chain: anvil }), + maxRequestsPerSecond: 50, + pollingInterval: 1_000, + finalityBlockCount: params?.finalityBlockCount ?? 
1, + disableCache: false, + } satisfies Network; }; export function getFreePort(): Promise { diff --git a/packages/core/src/bin/utils/run.test.ts b/packages/core/src/bin/utils/run.test.ts index 6e437f978..2a4120df2 100644 --- a/packages/core/src/bin/utils/run.test.ts +++ b/packages/core/src/bin/utils/run.test.ts @@ -1,8 +1,13 @@ +import { ALICE } from "@/_test/constants.js"; import { setupAnvil, setupCommon, setupIsolatedDatabase, } from "@/_test/setup.js"; +import { deployErc20 } from "@/_test/simulate.js"; +import { getErc20ConfigAndIndexingFunctions } from "@/_test/utils.js"; +import { getNetwork } from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import type { IndexingBuild } from "@/build/index.js"; import { buildSchema } from "@/build/schema.js"; import { createDatabase } from "@/database/index.js"; @@ -23,6 +28,23 @@ const account = onchainTable("account", (p) => ({ // const graphqlSchema = buildGraphQLSchema({ schema: { account } }); test("run() setup", async (context) => { + const network = getNetwork(); + + const { address } = await deployErc20({ sender: ALICE }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const indexingFunctions = { "Erc20:setup": vi.fn(), }; @@ -37,8 +59,8 @@ test("run() setup", async (context) => { instanceId: "1234", schema: { account }, databaseConfig: context.databaseConfig, - networks: context.networks, - sources: context.sources, + networks: [network], + sources, indexingFunctions, statements, namespace, @@ -70,6 +92,23 @@ test("run() setup", async (context) => { }); test("run() setup error", async (context) => { + const network = getNetwork(); + + const { address } = await deployErc20({ sender: ALICE }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const indexingFunctions = { "Erc20:setup": vi.fn(), }; @@ -85,8 +124,8 @@ test("run() setup error", async (context) => { instanceId: "1234", schema: { account }, databaseConfig: context.databaseConfig, - networks: context.networks, - sources: context.sources, + networks: [network], + sources, indexingFunctions, statements, namespace, diff --git a/packages/core/src/build/configAndIndexingFunctions.test.ts b/packages/core/src/build/configAndIndexingFunctions.test.ts index 518ebb997..3631d12b2 100644 --- a/packages/core/src/build/configAndIndexingFunctions.test.ts +++ b/packages/core/src/build/configAndIndexingFunctions.test.ts @@ -1,12 +1,12 @@ import path from "node:path"; import type { Options } from "@/common/options.js"; -import type { CallTraceFilter, LogFactory, LogFilter } from "@/sync/source.js"; +import type { LogFactory, LogFilter, TraceFilter } from "@/sync/source.js"; import { http, type Address, - getEventSelector, - getFunctionSelector, parseAbiItem, + toEventSelector, + toFunctionSelector, zeroAddress, } from "viem"; import { expect, test, vi } from "vitest"; @@ -61,8 +61,9 @@ test("buildConfigAndIndexingFunctions() builds topics for multiple events", asyn options, }); - expect((sources[0]!.filter as LogFilter).topics).toMatchObject([ - [getEventSelector(event0), getEventSelector(event1)], + expect((sources[0]!.filter 
as LogFilter).topic0).toMatchObject([ + toEventSelector(event0), + toEventSelector(event1), ]); }); @@ -94,8 +95,9 @@ test("buildConfigAndIndexingFunctions() handles overloaded event signatures and options, }); - expect((sources[0]!.filter as LogFilter).topics).toMatchObject([ - [getEventSelector(event1), getEventSelector(event1Overloaded)], + expect((sources[0]!.filter as LogFilter).topic0).toMatchObject([ + toEventSelector(event1), + toEventSelector(event1Overloaded), ]); }); @@ -150,10 +152,10 @@ test("buildConfigAndIndexingFunctions() builds topics for event with args", asyn options, }); - expect((sources[0]!.filter as LogFilter).topics).toMatchObject([ - [getEventSelector(event0)], - bytes1, + expect((sources[0]!.filter as LogFilter).topic0).toMatchObject([ + toEventSelector(event0), ]); + expect((sources[0]!.filter as LogFilter).topic1).toMatchObject(bytes1); }); test("buildConfigAndIndexingFunctions() builds topics for event with unnamed parameters", async () => { @@ -182,9 +184,12 @@ test("buildConfigAndIndexingFunctions() builds topics for event with unnamed par options, }); - expect((sources[0]!.filter as LogFilter).topics).toMatchObject([ - [getEventSelector(event1Overloaded)], - [bytes1, bytes2], + expect((sources[0]!.filter as LogFilter).topic0).toMatchObject([ + toEventSelector(event1Overloaded), + ]); + expect((sources[0]!.filter as LogFilter).topic1).toMatchObject([ + bytes1, + bytes2, ]); }); @@ -267,7 +272,7 @@ test("buildConfigAndIndexingFunctions() validates network name", async () => { expect(result.status).toBe("error"); expect(result.error?.message).toBe( - "Validation failed: Invalid network for contract 'a'. Got 'mainnetz', expected one of ['mainnet'].", + "Validation failed: Invalid network for 'a'. Got 'mainnetz', expected one of ['mainnet'].", ); }); @@ -470,7 +475,7 @@ test("buildConfigAndIndexingFunctions() validates address length", async () => { ); }); -test("buildConfigAndIndexingFunctions() coerces NaN startBlock to 0", async () => { +test("buildConfigAndIndexingFunctions() coerces NaN startBlock to undefined", async () => { const config = createConfig({ networks: { mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, @@ -490,39 +495,37 @@ test("buildConfigAndIndexingFunctions() coerces NaN startBlock to 0", async () = options, }); - expect(sources[0]?.filter.fromBlock).toBe(0); + expect(sources[0]?.filter.fromBlock).toBe(undefined); }); -test("buildConfigAndIndexingFunctions() includeTransactionReceipts", async () => { - const config = createConfig({ - networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, - optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, - }, - contracts: { - a: { - includeTransactionReceipts: true, - network: { - mainnet: {}, - optimism: { includeTransactionReceipts: false }, - }, - abi: [event0], - }, - }, - }); - - const { sources } = await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions: [{ name: "a:Event0", fn: () => {} }], - options, - }); - - expect((sources[0]!.filter as LogFilter).includeTransactionReceipts).toBe( - true, - ); - expect((sources[1]!.filter as LogFilter).includeTransactionReceipts).toBe( - false, - ); +test.skip("buildConfigAndIndexingFunctions() includeTransactionReceipts", async () => { + // const config = createConfig({ + // networks: { + // mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + // optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, + // }, + // contracts: { + // a: { + // 
includeTransactionReceipts: true, + // network: { + // mainnet: {}, + // optimism: { includeTransactionReceipts: false }, + // }, + // abi: [event0], + // }, + // }, + // }); + // const { sources } = await buildConfigAndIndexingFunctions({ + // config, + // rawIndexingFunctions: [{ name: "a:Event0", fn: () => {} }], + // options, + // }); + // expect((sources[0]!.filter as LogFilter).includeTransactionReceipts).toBe( + // true, + // ); + // expect((sources[1]!.filter as LogFilter).includeTransactionReceipts).toBe( + // false, + // ); }); test("buildConfigAndIndexingFunctions() includeCallTraces", async () => { @@ -552,16 +555,16 @@ test("buildConfigAndIndexingFunctions() includeCallTraces", async () => { expect(sources).toHaveLength(1); - expect((sources[0]!.filter as CallTraceFilter).fromAddress).toBeUndefined(); - expect((sources[0]!.filter as CallTraceFilter).toAddress).toMatchObject([ + expect((sources[0]!.filter as TraceFilter).fromAddress).toBeUndefined(); + expect((sources[0]!.filter as TraceFilter).toAddress).toMatchObject([ zeroAddress, ]); - expect( - (sources[0]!.filter as CallTraceFilter).functionSelectors, - ).toMatchObject([getFunctionSelector(func0)]); - expect( - (sources[0]!.filter as CallTraceFilter).includeTransactionReceipts, - ).toBe(false); + expect((sources[0]!.filter as TraceFilter).functionSelector).toMatchObject([ + toFunctionSelector(func0), + ]); + // expect((sources[0]!.filter as TraceFilter).includeTransactionReceipts).toBe( + // false, + // ); }); test("buildConfigAndIndexingFunctions() includeCallTraces with factory", async () => { @@ -595,16 +598,16 @@ test("buildConfigAndIndexingFunctions() includeCallTraces with factory", async ( expect(sources).toHaveLength(1); - expect((sources[0]!.filter as CallTraceFilter).fromAddress).toBeUndefined(); + expect((sources[0]!.filter as TraceFilter).fromAddress).toBeUndefined(); expect( - ((sources[0]!.filter as CallTraceFilter).toAddress as LogFactory).address, + ((sources[0]!.filter as TraceFilter).toAddress as LogFactory).address, ).toMatchObject(address2); - expect( - (sources[0]!.filter as CallTraceFilter).functionSelectors, - ).toMatchObject([getFunctionSelector(func0)]); - expect( - (sources[0]!.filter as CallTraceFilter).includeTransactionReceipts, - ).toBe(false); + expect((sources[0]!.filter as TraceFilter).functionSelector).toMatchObject([ + toFunctionSelector(func0), + ]); + // expect( + // (sources[0]!.filter as TraceFilter).includeTransactionReceipts, + // ).toBe(false); }); test("buildConfigAndIndexingFunctions() coerces NaN endBlock to undefined", async () => { @@ -798,3 +801,76 @@ test("buildConfigAndIndexingFunctions() database with postgres uses pool config" }, }); }); + +test("buildConfigAndIndexingFunctions() account source", async () => { + const config = createConfig({ + networks: { + mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + }, + accounts: { + a: { + network: { mainnet: {} }, + address: address1, + startBlock: 16370000, + endBlock: 16370020, + }, + }, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions: [ + { name: "a:transfer:from", fn: () => {} }, + { name: "a:transaction:to", fn: () => {} }, + ], + options, + }); + + expect(sources).toHaveLength(2); + + expect(sources[0]?.networkName).toBe("mainnet"); + expect(sources[1]?.networkName).toBe("mainnet"); + + expect(sources[0]?.name).toBe("a"); + expect(sources[1]?.name).toBe("a"); + + expect(sources[0]?.filter.type).toBe("transaction"); + 
expect(sources[1]?.filter.type).toBe("transfer"); + + expect(sources[0]?.filter.fromBlock).toBe(16370000); + expect(sources[1]?.filter.fromBlock).toBe(16370000); + + expect(sources[0]?.filter.toBlock).toBe(16370020); + expect(sources[1]?.filter.toBlock).toBe(16370020); +}); + +test("buildConfigAndIndexingFunctions() block source", async () => { + const config = createConfig({ + networks: { + mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + }, + blocks: { + a: { + network: { mainnet: {} }, + startBlock: 16370000, + endBlock: 16370020, + }, + }, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions: [{ name: "a:block", fn: () => {} }], + options, + }); + + expect(sources).toHaveLength(1); + + expect(sources[0]?.networkName).toBe("mainnet"); + expect(sources[0]?.name).toBe("a"); + expect(sources[0]?.filter.type).toBe("block"); + // @ts-ignore + expect(sources[0]?.filter.interval).toBe(1); + expect(sources[0]?.filter.fromBlock).toBe(16370000); + expect(sources[0]?.filter.toBlock).toBe(16370020); +}); diff --git a/packages/core/src/build/configAndIndexingFunctions.ts b/packages/core/src/build/configAndIndexingFunctions.ts index 479abcb39..7d9dd8ffc 100644 --- a/packages/core/src/build/configAndIndexingFunctions.ts +++ b/packages/core/src/build/configAndIndexingFunctions.ts @@ -10,10 +10,14 @@ import { isRpcUrlPublic, } from "@/config/networks.js"; import { buildAbiEvents, buildAbiFunctions, buildTopics } from "@/sync/abi.js"; -import type { BlockSource, ContractSource } from "@/sync/source.js"; +import type { + AccountSource, + BlockSource, + ContractSource, + Source, +} from "@/sync/source.js"; import { chains } from "@/utils/chains.js"; import { toLowerCase } from "@/utils/lowercase.js"; -import { dedupe } from "@ponder/common"; import parse from "pg-connection-string"; import type { Hex, LogTopic } from "viem"; import { buildLogFactory } from "./factory.js"; @@ -27,6 +31,36 @@ export type IndexingFunctions = { [eventName: string]: (...args: any) => any; }; +const flattenSources = < + T extends Config["contracts"] | Config["accounts"] | Config["blocks"], +>( + config: T, +): (Omit & { name: string; network: string })[] => { + return Object.entries(config).flatMap( + ([name, source]: [string, T[string]]) => { + if (typeof source.network === "string") { + return { + name, + ...source, + }; + } else { + return Object.entries(source.network).map( + ([network, sourceOverride]) => { + const { network: _network, ...base } = source; + + return { + name, + network, + ...base, + ...sourceOverride, + }; + }, + ); + } + }, + ); +}; + export async function buildConfigAndIndexingFunctions({ config, rawIndexingFunctions, @@ -38,7 +72,7 @@ export async function buildConfigAndIndexingFunctions({ }): Promise<{ databaseConfig: DatabaseConfig; networks: Network[]; - sources: (BlockSource | ContractSource)[]; + sources: Source[]; indexingFunctions: IndexingFunctions; logs: { level: "warn" | "info" | "debug"; msg: string }[]; }> { @@ -172,6 +206,20 @@ export async function buildConfigAndIndexingFunctions({ }), ); + const sourceNames = new Set(); + for (const source of [ + ...Object.keys(config.contracts ?? {}), + ...Object.keys(config.accounts ?? {}), + ...Object.keys(config.blocks ?? 
{}), + ]) { + if (sourceNames.has(source)) { + throw new Error( + `Validation failed: Duplicate source name '${source}' not allowed.`, + ); + } + sourceNames.add(source); + } + // Validate and build indexing functions let indexingFunctionCount = 0; const indexingFunctions: IndexingFunctions = {}; @@ -180,10 +228,37 @@ export async function buildConfigAndIndexingFunctions({ const eventNameComponents = eventName.includes(".") ? eventName.split(".") : eventName.split(":"); - const [sourceName, sourceEventName] = eventNameComponents; - if (eventNameComponents.length !== 2 || !sourceName || !sourceEventName) { + + const [sourceName] = eventNameComponents; + + if (!sourceName) { throw new Error( - `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{eventName}'.`, + `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, + ); + } + + if (eventNameComponents.length === 3) { + const [, sourceType, fromOrTo] = eventNameComponents; + + if ( + (sourceType !== "transaction" && sourceType !== "transfer") || + (fromOrTo !== "from" && fromOrTo !== "to") + ) { + throw new Error( + `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:transaction:from', '{sourceName}:transaction:to', '{sourceName}:transfer:from', or '{sourceName}:transfer:to'.`, + ); + } + } else if (eventNameComponents.length === 2) { + const [, sourceEventName] = eventNameComponents; + + if (!sourceEventName) { + throw new Error( + `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, + ); + } + } else { + throw new Error( + `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, ); } @@ -196,16 +271,15 @@ export async function buildConfigAndIndexingFunctions({ // Validate that the indexing function uses a sourceName that is present in the config. const matchedSourceName = Object.keys({ ...(config.contracts ?? {}), + ...(config.accounts ?? {}), ...(config.blocks ?? {}), }).find((_sourceName) => _sourceName === sourceName); if (!matchedSourceName) { - // Multi-network has N sources, but the hint here should not have duplicates. - const uniqueSourceNames = dedupe( - Object.keys({ ...(config.contracts ?? {}), ...(config.blocks ?? {}) }), - ); throw new Error( - `Validation failed: Invalid source name '${sourceName}'. Got '${sourceName}', expected one of [${uniqueSourceNames + `Validation failed: Invalid source name '${sourceName}'. Got '${sourceName}', expected one of [${Array.from( + sourceNames, + ) .map((n) => `'${n}'`) .join(", ")}].`, ); @@ -219,118 +293,56 @@ export async function buildConfigAndIndexingFunctions({ logs.push({ level: "warn", msg: "No indexing functions were registered." }); } - const contractSources: ContractSource[] = Object.entries( - config.contracts ?? {}, - ) - // First, apply any network-specific overrides and flatten the result. - .flatMap(([contractName, contract]) => { - if (contract.network === null || contract.network === undefined) { - throw new Error( - `Validation failed: Network for contract '${contractName}' is null or undefined. Expected one of [${networks - .map((n) => `'${n.name}'`) - .join(", ")}].`, - ); - } - - const startBlockMaybeNan = contract.startBlock ?? 0; - const startBlock = Number.isNaN(startBlockMaybeNan) - ? 
0 - : startBlockMaybeNan; - const endBlockMaybeNan = contract.endBlock; - const endBlock = Number.isNaN(endBlockMaybeNan) - ? undefined - : endBlockMaybeNan; - - if (endBlock !== undefined && endBlock < startBlock) { - throw new Error( - `Validation failed: Start block for contract '${contractName}' is after end block (${startBlock} > ${endBlock}).`, - ); - } - - // Single network case. - if (typeof contract.network === "string") { - return { - id: `log_${contractName}_${contract.network}`, - name: contractName, - networkName: contract.network, - abi: contract.abi, - - address: "address" in contract ? contract.address : undefined, - factory: "factory" in contract ? contract.factory : undefined, - filter: contract.filter, - - includeTransactionReceipts: - contract.includeTransactionReceipts ?? false, - includeCallTraces: contract.includeCallTraces ?? false, + // common validation for all sources + for (const source of [ + ...flattenSources(config.contracts ?? {}), + ...flattenSources(config.accounts ?? {}), + ...flattenSources(config.blocks ?? {}), + ]) { + if (source.network === null || source.network === undefined) { + throw new Error( + `Validation failed: Network for '${source.name}' is null or undefined. Expected one of [${networks + .map((n) => `'${n.name}'`) + .join(", ")}].`, + ); + } - startBlock, - endBlock, - }; - } + const startBlockMaybeNan = source.startBlock; + const startBlock = Number.isNaN(startBlockMaybeNan) + ? undefined + : startBlockMaybeNan; + const endBlockMaybeNan = source.endBlock; + const endBlock = Number.isNaN(endBlockMaybeNan) + ? undefined + : endBlockMaybeNan; + + if ( + startBlock !== undefined && + endBlock !== undefined && + endBlock < startBlock + ) { + throw new Error( + `Validation failed: Start block for '${source.name}' is after end block (${startBlock} > ${endBlock}).`, + ); + } - type DefinedNetworkOverride = NonNullable< - Exclude[string] - >; - - // Multiple networks case. - return Object.entries(contract.network) - .filter((n): n is [string, DefinedNetworkOverride] => !!n[1]) - .map(([networkName, overrides]) => { - const startBlockMaybeNan = - overrides.startBlock ?? contract.startBlock ?? 0; - const startBlock = Number.isNaN(startBlockMaybeNan) - ? 0 - : startBlockMaybeNan; - const endBlockMaybeNan = overrides.endBlock ?? contract.endBlock; - const endBlock = Number.isNaN(endBlockMaybeNan) - ? undefined - : endBlockMaybeNan; - - if (endBlock !== undefined && endBlock < startBlock) { - throw new Error( - `Validation failed: Start block for contract '${contractName}' is after end block (${startBlock} > ${endBlock}).`, - ); - } + const network = networks.find((n) => n.name === source.network); + if (!network) { + throw new Error( + `Validation failed: Invalid network for '${ + source.name + }'. Got '${source.network}', expected one of [${networks + .map((n) => `'${n.name}'`) + .join(", ")}].`, + ); + } + } - return { - name: contractName, - networkName, - abi: contract.abi, - - address: - ("address" in overrides ? overrides?.address : undefined) ?? - ("address" in contract ? contract.address : undefined), - factory: - ("factory" in overrides ? overrides.factory : undefined) ?? - ("factory" in contract ? contract.factory : undefined), - filter: overrides.filter ?? contract.filter, - - includeTransactionReceipts: - overrides.includeTransactionReceipts ?? - contract.includeTransactionReceipts ?? - false, - includeCallTraces: - overrides.includeCallTraces ?? - contract.includeCallTraces ?? 
- false, - - startBlock, - endBlock, - }; - }); - }) - // Second, build and validate the factory or log source. - .flatMap((rawContract): ContractSource[] => { - const network = networks.find((n) => n.name === rawContract.networkName); - if (!network) { - throw new Error( - `Validation failed: Invalid network for contract '${ - rawContract.name - }'. Got '${rawContract.networkName}', expected one of [${networks - .map((n) => `'${n.name}'`) - .join(", ")}].`, - ); - } + const contractSources: ContractSource[] = flattenSources( + config.contracts ?? {}, + ) + .flatMap((source): ContractSource[] => { + const network = networks.find((n) => n.name === source.network)!; // Get indexing function that were registered for this contract const registeredLogEvents: string[] = []; @@ -342,29 +354,26 @@ export async function buildConfigAndIndexingFunctions({ string, string, ]; - if ( - logContractName === rawContract.name && - logEventName !== "setup" - ) { + if (logContractName === source.name && logEventName !== "setup") { registeredLogEvents.push(logEventName); } } - // call trace event + // trace event if (eventName.includes(".")) { const [functionContractName, functionName] = eventName.split(".") as [ string, string, ]; - if (functionContractName === rawContract.name) { + if (functionContractName === source.name) { registeredCallTraceEvents.push(functionName); } } } // Note: This can probably throw for invalid ABIs. Consider adding explicit ABI validation before this line. - const abiEvents = buildAbiEvents({ abi: rawContract.abi }); - const abiFunctions = buildAbiFunctions({ abi: rawContract.abi }); + const abiEvents = buildAbiEvents({ abi: source.abi }); + const abiFunctions = buildAbiFunctions({ abi: source.abi }); const registeredEventSelectors: Hex[] = []; // Validate that the registered log events exist in the abi @@ -399,28 +408,31 @@ export async function buildConfigAndIndexingFunctions({ registeredFunctionSelectors.push(abiFunction.selector); } - let topics: LogTopic[] = [registeredEventSelectors]; + let topic0: LogTopic = registeredEventSelectors; + let topic1: LogTopic = null; + let topic2: LogTopic = null; + let topic3: LogTopic = null; - if (rawContract.filter !== undefined) { + if (source.filter !== undefined) { if ( - Array.isArray(rawContract.filter.event) && - rawContract.filter.args !== undefined + Array.isArray(source.filter.event) && + source.filter.args !== undefined ) { throw new Error( - `Validation failed: Event filter for contract '${rawContract.name}' cannot contain indexed argument values if multiple events are provided.`, + `Validation failed: Event filter for contract '${source.name}' cannot contain indexed argument values if multiple events are provided.`, ); } - const filterSafeEventNames = Array.isArray(rawContract.filter.event) - ? rawContract.filter.event - : [rawContract.filter.event]; + const filterSafeEventNames = Array.isArray(source.filter.event) + ? source.filter.event + : [source.filter.event]; for (const filterSafeEventName of filterSafeEventNames) { const abiEvent = abiEvents.bySafeName[filterSafeEventName]; if (!abiEvent) { throw new Error( `Validation failed: Invalid filter for contract '${ - rawContract.name + source.name }'. Got event name '${filterSafeEventName}', expected one of [${Object.keys( abiEvents.bySafeName, ) @@ -434,10 +446,12 @@ export async function buildConfigAndIndexingFunctions({ // The first element of the array return from `buildTopics` being defined // is an invariant of the current filter design. // Note: This can throw. 
- const [topic0FromFilter, ...topicsFromFilter] = buildTopics( - rawContract.abi, - rawContract.filter, - ) as [Exclude, ...LogTopic[]]; + + const topics = buildTopics(source.abi, source.filter); + const topic0FromFilter = topics.topic0; + topic1 = topics.topic1; + topic2 = topics.topic2; + topic3 = topics.topic3; const filteredEventSelectors = Array.isArray(topic0FromFilter) ? topic0FromFilter @@ -453,7 +467,7 @@ export async function buildConfigAndIndexingFunctions({ throw new Error( `Validation failed: Event '${logEventName}' is excluded by the event filter defined on the contract '${ - rawContract.name + source.name }'. Got '${logEventName}', expected one of [${filteredEventSelectors .map((s) => abiEvents.bySelector[s]!.safeName) .map((eventName) => `'${eventName}'`) @@ -462,20 +476,29 @@ export async function buildConfigAndIndexingFunctions({ } } - topics = [registeredEventSelectors, ...topicsFromFilter]; + topic0 = registeredEventSelectors; } + const startBlockMaybeNan = source.startBlock; + const fromBlock = Number.isNaN(startBlockMaybeNan) + ? undefined + : startBlockMaybeNan; + const endBlockMaybeNan = source.endBlock; + const toBlock = Number.isNaN(endBlockMaybeNan) + ? undefined + : endBlockMaybeNan; + const contractMetadata = { type: "contract", - abi: rawContract.abi, + abi: source.abi, abiEvents, abiFunctions, - name: rawContract.name, - networkName: rawContract.networkName, + name: source.name, + networkName: source.network, } as const; - const resolvedFactory = rawContract?.factory; - const resolvedAddress = rawContract?.address; + const resolvedFactory = source?.factory; + const resolvedAddress = source?.address; if (resolvedFactory !== undefined && resolvedAddress !== undefined) { throw new Error( @@ -496,50 +519,53 @@ export async function buildConfigAndIndexingFunctions({ type: "log", chainId: network.chainId, address: logFactory, - topics, - includeTransactionReceipts: rawContract.includeTransactionReceipts, - fromBlock: rawContract.startBlock, - toBlock: rawContract.endBlock, + topic0, + topic1, + topic2, + topic3, + // includeTransactionReceipts: source.includeTransactionReceipts, + fromBlock, + toBlock, }, } satisfies ContractSource; - if (rawContract.includeCallTraces) { + if (source.includeCallTraces) { return [ logSource, { ...contractMetadata, filter: { - type: "callTrace", + type: "trace", chainId: network.chainId, fromAddress: undefined, toAddress: logFactory, - functionSelectors: registeredFunctionSelectors, - includeTransactionReceipts: - rawContract.includeTransactionReceipts, - fromBlock: rawContract.startBlock, - toBlock: rawContract.endBlock, + callType: "CALL", + functionSelector: registeredFunctionSelectors, + includeReverted: false, + // includeTransactionReceipts: + // rawContract.includeTransactionReceipts, + fromBlock, + toBlock, }, } satisfies ContractSource, ]; } return [logSource]; - } - - if (resolvedAddress !== undefined) { + } else if (resolvedAddress !== undefined) { for (const address of Array.isArray(resolvedAddress) ? resolvedAddress : [resolvedAddress]) { - if (!address.startsWith("0x")) + if (!address!.startsWith("0x")) throw new Error( - `Validation failed: Invalid prefix for address '${address}'. Got '${address.slice( + `Validation failed: Invalid prefix for address '${address}'. Got '${address!.slice( 0, 2, )}', expected '0x'.`, ); - if (address.length !== 42) + if (address!.length !== 42) throw new Error( - `Validation failed: Invalid length for address '${address}'. 
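For context: the log filter now carries four separate `topic0`–`topic3` slots instead of a single `topics` array. A rough illustration of how an event filter with an indexed argument maps onto those slots, using viem's `encodeEventTopics` directly (the real code goes through `buildTopics`, whose return shape is only inferred from this diff):

```ts
import { encodeEventTopics, parseAbi, zeroAddress } from "viem";

// Hypothetical ERC-20 style ABI, for illustration only.
const abi = parseAbi([
  "event Transfer(address indexed from, address indexed to, uint256 amount)",
]);

// A contract filter like { event: "Transfer", args: { from: zeroAddress } }
// resolves to topic0 (the event selector) plus positional indexed-arg topics.
const [topic0, topic1, topic2, topic3] = encodeEventTopics({
  abi,
  eventName: "Transfer",
  args: { from: zeroAddress },
});

console.log({ topic0, topic1, topic2, topic3 });
// topic0: Transfer selector, topic1: padded `from` address,
// topic2/topic3: empty because `to` is unconstrained and `amount` is not indexed.
```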
Got ${address.length}, expected 42 characters.`, + `Validation failed: Invalid length for address '${address}'. Got ${address!.length}, expected 42 characters.`, ); } } @@ -556,20 +582,23 @@ export async function buildConfigAndIndexingFunctions({ type: "log", chainId: network.chainId, address: validatedAddress, - topics, - includeTransactionReceipts: rawContract.includeTransactionReceipts, - fromBlock: rawContract.startBlock, - toBlock: rawContract.endBlock, + topic0, + topic1, + topic2, + topic3, + // includeTransactionReceipts: rawContract.includeTransactionReceipts, + fromBlock, + toBlock, }, } satisfies ContractSource; - if (rawContract.includeCallTraces) { + if (source.includeCallTraces) { return [ logSource, { ...contractMetadata, filter: { - type: "callTrace", + type: "trace", chainId: network.chainId, fromAddress: undefined, toAddress: Array.isArray(validatedAddress) @@ -577,145 +606,269 @@ export async function buildConfigAndIndexingFunctions({ : validatedAddress === undefined ? undefined : [validatedAddress], - functionSelectors: registeredFunctionSelectors, - includeTransactionReceipts: - rawContract.includeTransactionReceipts, - fromBlock: rawContract.startBlock, - toBlock: rawContract.endBlock, + callType: "CALL", + functionSelector: registeredFunctionSelectors, + includeReverted: false, + // includeTransactionReceipts: + // rawContract.includeTransactionReceipts, + fromBlock, + toBlock, }, } satisfies ContractSource, ]; } else return [logSource]; - }) - // Remove sources with no registered indexing functions + }) // Remove sources with no registered indexing functions .filter((source) => { const hasRegisteredIndexingFunctions = - source.filter.type === "callTrace" - ? source.filter.functionSelectors.length !== 0 - : source.filter.topics[0]?.length !== 0; + source.filter.type === "trace" + ? Array.isArray(source.filter.functionSelector) && + source.filter.functionSelector.length > 0 + : Array.isArray(source.filter.topic0) && + source.filter.topic0?.length > 0; if (!hasRegisteredIndexingFunctions) { logs.push({ level: "debug", msg: `No indexing functions were registered for '${ source.name - }' ${source.filter.type === "callTrace" ? "call traces" : "logs"}`, + }' ${source.filter.type === "trace" ? "traces" : "logs"}`, }); } return hasRegisteredIndexingFunctions; }); - const blockSources: BlockSource[] = Object.entries(config.blocks ?? {}) - .flatMap(([sourceName, blockSourceConfig]) => { - const startBlockMaybeNan = blockSourceConfig.startBlock ?? 0; - const startBlock = Number.isNaN(startBlockMaybeNan) - ? 0 + const accountSources: AccountSource[] = flattenSources(config.accounts ?? {}) + .flatMap((source): AccountSource[] => { + const network = networks.find((n) => n.name === source.network)!; + + const startBlockMaybeNan = source.startBlock; + const fromBlock = Number.isNaN(startBlockMaybeNan) + ? undefined : startBlockMaybeNan; - const endBlockMaybeNan = blockSourceConfig.endBlock; - const endBlock = Number.isNaN(endBlockMaybeNan) + const endBlockMaybeNan = source.endBlock; + const toBlock = Number.isNaN(endBlockMaybeNan) ? 
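A small restatement of the pruning predicate above, outside the diff: a contract source is kept only when at least one matching handler selector was collected earlier.

```ts
// Illustrative filter shapes; the real ContractSource filters carry more fields.
type ContractFilterSketch =
  | { type: "log"; topic0: `0x${string}`[] | null }
  | { type: "trace"; functionSelector: `0x${string}`[] };

const hasRegisteredHandlers = (filter: ContractFilterSketch): boolean =>
  filter.type === "trace"
    ? filter.functionSelector.length > 0
    : Array.isArray(filter.topic0) && filter.topic0.length > 0;
```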
undefined : endBlockMaybeNan; - if (endBlock !== undefined && endBlock < startBlock) { + const resolvedFactory = source?.factory; + const resolvedAddress = source?.address; + + if (resolvedFactory !== undefined && resolvedAddress !== undefined) { throw new Error( - `Validation failed: Start block for block source '${sourceName}' is after end block (${startBlock} > ${endBlock}).`, + `Validation failed: Account '${source.name}' cannot specify both 'factory' and 'address' options.`, ); } - if (typeof blockSourceConfig.network === "string") { - const network = networks.find( - (n) => n.name === blockSourceConfig.network, + if (resolvedFactory === undefined && resolvedAddress === undefined) { + throw new Error( + `Validation failed: Account '${source.name}' must specify either 'factory' or 'address' options.`, ); - if (!network) { - throw new Error( - `Validation failed: Invalid network for block source '${sourceName}'. Got '${ - blockSourceConfig.network - }', expected one of [${networks.map((n) => `'${n.name}'`).join(", ")}].`, - ); - } + } - const intervalMaybeNan = blockSourceConfig.interval ?? 1; - const interval = Number.isNaN(intervalMaybeNan) ? 0 : intervalMaybeNan; + if (resolvedFactory) { + // Note that this can throw. + const logFactory = buildLogFactory({ + chainId: network.chainId, + ...resolvedFactory, + }); - if (!Number.isInteger(interval) || interval === 0) { + return [ + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transaction", + chainId: network.chainId, + fromAddress: undefined, + toAddress: logFactory, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transaction", + chainId: network.chainId, + fromAddress: logFactory, + toAddress: undefined, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transfer", + chainId: network.chainId, + fromAddress: undefined, + toAddress: logFactory, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transfer", + chainId: network.chainId, + fromAddress: logFactory, + toAddress: undefined, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + ]; + } + + for (const address of Array.isArray(resolvedAddress) + ? resolvedAddress + : [resolvedAddress]) { + if (!address!.startsWith("0x")) throw new Error( - `Validation failed: Invalid interval for block source '${sourceName}'. Got ${interval}, expected a non-zero integer.`, + `Validation failed: Invalid prefix for address '${address}'. Got '${address!.slice( + 0, + 2, + )}', expected '0x'.`, ); - } + if (address!.length !== 42) + throw new Error( + `Validation failed: Invalid length for address '${address}'. Got ${address!.length}, expected 42 characters.`, + ); + } - return { - type: "block", - name: sourceName, - networkName: blockSourceConfig.network, + const validatedAddress = Array.isArray(resolvedAddress) + ? resolvedAddress.map((r) => toLowerCase(r)) + : resolvedAddress !== undefined + ? 
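To summarize the expansion above: each account entry produces four sync filters, the cross product of filter type (`transaction`, `transfer`) and direction (`from`, `to`). A compact sketch of that cross product with simplified filter objects and made-up values:

```ts
// Sketch: the four filters produced for one account source.
// Field names mirror the diff; the shapes are simplified.
const buildAccountFilters = (opts: {
  chainId: number;
  address: `0x${string}`;
  fromBlock?: number;
  toBlock?: number;
}) =>
  (["transaction", "transfer"] as const).flatMap((type) =>
    (["from", "to"] as const).map((direction) => ({
      type,
      chainId: opts.chainId,
      // ":from" handlers constrain fromAddress, ":to" handlers constrain toAddress.
      fromAddress: direction === "from" ? opts.address : undefined,
      toAddress: direction === "to" ? opts.address : undefined,
      includeReverted: false,
      fromBlock: opts.fromBlock,
      toBlock: opts.toBlock,
    })),
  );

// buildAccountFilters({
//   chainId: 1,
//   address: "0x0000000000000000000000000000000000000001",
// })
// -> transaction:from, transaction:to, transfer:from, transfer:to
```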
toLowerCase(resolvedAddress) + : undefined; + + return [ + { + type: "account", + name: source.name, + + networkName: source.network, filter: { - type: "block", + type: "transaction", chainId: network.chainId, - interval: interval, - offset: startBlock % interval, - fromBlock: startBlock, - toBlock: endBlock, + fromAddress: undefined, + toAddress: validatedAddress, + includeReverted: false, + fromBlock, + toBlock, }, - } satisfies BlockSource; - } + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transaction", + chainId: network.chainId, + fromAddress: validatedAddress, + toAddress: undefined, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transfer", + chainId: network.chainId, + fromAddress: undefined, + toAddress: validatedAddress, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + { + type: "account", + name: source.name, + networkName: source.network, + filter: { + type: "transfer", + chainId: network.chainId, + fromAddress: validatedAddress, + toAddress: undefined, + includeReverted: false, + fromBlock, + toBlock, + }, + } satisfies AccountSource, + ]; + }) + .filter((source) => { + const eventName = + source.filter.type === "transaction" + ? source.filter.fromAddress === undefined + ? `${source.name}:transaction:to` + : `${source.name}:transaction:from` + : source.filter.fromAddress === undefined + ? `${source.name}:transfer:to` + : `${source.name}:transfer:from`; - type DefinedNetworkOverride = NonNullable< - Exclude[string] - >; + const hasRegisteredIndexingFunction = + indexingFunctions[eventName] !== undefined; + if (!hasRegisteredIndexingFunction) { + logs.push({ + level: "debug", + msg: `No indexing functions were registered for '${eventName}'`, + }); + } + return hasRegisteredIndexingFunction; + }); - return Object.entries(blockSourceConfig.network) - .filter((n): n is [string, DefinedNetworkOverride] => !!n[1]) - .map(([networkName, overrides]) => { - const network = networks.find((n) => n.name === networkName); - if (!network) { - throw new Error( - `Validation failed: Invalid network for block source '${sourceName}'. Got '${networkName}', expected one of [${networks - .map((n) => `'${n.name}'`) - .join(", ")}].`, - ); - } + const blockSources: BlockSource[] = flattenSources(config.blocks ?? {}) + .map((source) => { + const network = networks.find((n) => n.name === source.network)!; - const startBlockMaybeNan = - overrides.startBlock ?? blockSourceConfig.startBlock ?? 0; - const startBlock = Number.isNaN(startBlockMaybeNan) - ? 0 - : startBlockMaybeNan; - const endBlockMaybeNan = - overrides.endBlock ?? blockSourceConfig.endBlock; - const endBlock = Number.isNaN(endBlockMaybeNan) - ? undefined - : endBlockMaybeNan; - - if (endBlock !== undefined && endBlock < startBlock) { - throw new Error( - `Validation failed: Start block for block source '${sourceName}' is after end block (${startBlock} > ${endBlock}).`, - ); - } + const intervalMaybeNan = source.interval ?? 1; + const interval = Number.isNaN(intervalMaybeNan) ? 0 : intervalMaybeNan; - const intervalMaybeNan = - overrides.interval ?? blockSourceConfig.interval ?? 0; - const interval = Number.isNaN(intervalMaybeNan) - ? 
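Also outside the diff: the pruning step above maps each generated account filter back to a handler name and keeps it only if that handler was registered. The mapping is roughly:

```ts
// Sketch: derive the handler name for an account filter (illustrative shape).
type AccountFilterSketch = {
  type: "transaction" | "transfer";
  fromAddress?: unknown;
  toAddress?: unknown;
};

const toEventName = (sourceName: string, filter: AccountFilterSketch): string => {
  // When fromAddress is left unconstrained, the filter matches activity sent
  // *to* the account, so it belongs to the ":to" handler, and vice versa.
  const direction = filter.fromAddress === undefined ? "to" : "from";
  return `${sourceName}:${filter.type}:${direction}`;
};

// toEventName("MyAccount", { type: "transfer", fromAddress: "0x…" })
// -> "MyAccount:transfer:from"
```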
0 - : intervalMaybeNan; + if (!Number.isInteger(interval) || interval === 0) { + throw new Error( + `Validation failed: Invalid interval for block source '${source.name}'. Got ${interval}, expected a non-zero integer.`, + ); + } - if (!Number.isInteger(interval) || interval === 0) { - throw new Error( - `Validation failed: Invalid interval for block source '${sourceName}'. Got ${interval}, expected a non-zero integer.`, - ); - } + const startBlockMaybeNan = source.startBlock; + const fromBlock = Number.isNaN(startBlockMaybeNan) + ? undefined + : startBlockMaybeNan; + const endBlockMaybeNan = source.endBlock; + const toBlock = Number.isNaN(endBlockMaybeNan) + ? undefined + : endBlockMaybeNan; - return { - type: "block", - name: sourceName, - networkName, - filter: { - type: "block", - chainId: network.chainId, - interval: interval, - offset: startBlock % interval, - fromBlock: startBlock, - toBlock: endBlock, - }, - } satisfies BlockSource; - }); + return { + type: "block", + name: source.name, + networkName: source.network, + filter: { + type: "block", + chainId: network.chainId, + interval: interval, + offset: (fromBlock ?? 0) % interval, + fromBlock, + toBlock, + }, + } satisfies BlockSource; }) .filter((blockSource) => { const hasRegisteredIndexingFunction = @@ -729,7 +882,7 @@ export async function buildConfigAndIndexingFunctions({ return hasRegisteredIndexingFunction; }); - const sources = [...contractSources, ...blockSources]; + const sources = [...contractSources, ...accountSources, ...blockSources]; // Filter out any networks that don't have any sources registered. const networksWithSources = networks.filter((network) => { diff --git a/packages/core/src/config/config.test-d.ts b/packages/core/src/config/config.test-d.ts index f930aa225..55dc283ad 100644 --- a/packages/core/src/config/config.test-d.ts +++ b/packages/core/src/config/config.test-d.ts @@ -301,3 +301,24 @@ test("createConfig strict return type", () => { }; }>(config.contracts); }); + +test("createConfig accounts", () => { + createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(), + }, + optimism: { + chainId: 10, + transport: http(), + }, + }, + accounts: { + me: { + network: "mainnet", + address: ["0x"], + }, + }, + }); +}); diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts index 4094fe35f..49d56cbd7 100644 --- a/packages/core/src/config/config.ts +++ b/packages/core/src/config/config.ts @@ -5,13 +5,43 @@ import type { GetAddress } from "./address.js"; import type { GetEventFilter } from "./eventFilter.js"; import type { NonStrictPick } from "./utilityTypes.js"; -export type BlockConfig = { - /** Block number at which to start indexing events (inclusive). If `undefined`, events will be processed from block 0. Default: `undefined`. */ - startBlock?: number; - /** Block number at which to stop indexing events (inclusive). If `undefined`, events will be processed in real-time. Default: `undefined`. 
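A quick note on the block filter arithmetic above: `offset` is the residue of the start block modulo the interval, so (assuming the matching convention implied by the earlier `startBlock % interval` code) a block is selected when its number falls in that residue class within the block range.

```ts
// Sketch of the assumed matching rule for a block filter's interval/offset pair.
const matchesBlockFilter = (
  blockNumber: number,
  filter: { interval: number; offset: number; fromBlock?: number; toBlock?: number },
): boolean => {
  if (filter.fromBlock !== undefined && blockNumber < filter.fromBlock) return false;
  if (filter.toBlock !== undefined && blockNumber > filter.toBlock) return false;
  return blockNumber % filter.interval === filter.offset;
};

// With fromBlock 16370001 and interval 2, offset = 16370001 % 2 = 1, so every
// odd-numbered block in range matches, e.g.
// matchesBlockFilter(16370003, { interval: 2, offset: 1, fromBlock: 16370001 }) === true
```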
*/ - endBlock?: number; +export type Config = { + networks: { [networkName: string]: NetworkConfig }; + contracts: { [contractName: string]: GetContract }; + accounts: { [accountName: string]: AccountConfig }; + database?: DatabaseConfig; + blocks: { + [sourceName: string]: GetBlockFilter; + }; }; +export type CreateConfigReturnType = { + networks: networks; + contracts: contracts; + accounts: accounts; + database?: DatabaseConfig; + blocks: blocks; +}; + +export const createConfig = < + const networks, + const contracts = {}, + const accounts = {}, + const blocks = {}, +>(config: { + database?: DatabaseConfig; + // TODO: add jsdoc to these properties. + networks: NetworksConfig>; + contracts?: ContractsConfig>; + accounts?: AccountsConfig>; + blocks?: BlockFiltersConfig; +}): CreateConfigReturnType => + config as Prettify< + CreateConfigReturnType + >; + +// database + type DatabaseConfig = | { kind: "pglite"; @@ -29,7 +59,31 @@ type DatabaseConfig = }; }; -export type NetworkConfig = { +// base + +type BlockConfig = { + /** Block number at which to start indexing events (inclusive). If `undefined`, events will be processed from block 0. Default: `undefined`. */ + startBlock?: number; + /** Block number at which to stop indexing events (inclusive). If `undefined`, events will be processed in real-time. Default: `undefined`. */ + endBlock?: number; +}; + +type TransactionReceiptConfig = { + includeTransactionReceipts?: boolean; +}; + +type FunctionCallConfig = { + /* + * Enable call trace indexing for this contract. + * + * - Docs: https://ponder.sh/docs/indexing/call-traces + */ + includeCallTraces?: boolean; +}; + +// network + +type NetworkConfig = { /** Chain ID of the network. */ chainId: network extends { chainId: infer chainId extends number } ? chainId | number @@ -57,48 +111,20 @@ export type NetworkConfig = { disableCache?: boolean; }; -export type BlockFilterConfig = { - /** Block number at which to start indexing events (inclusive). If `undefined`, events will be processed from block 0. Default: `undefined`. */ - startBlock?: number; - /** Block number at which to stop indexing events (inclusive). If `undefined`, events will be processed in real-time. Default: `undefined`. */ - endBlock?: number; - interval?: number; -}; +type NetworksConfig = {} extends networks + ? {} + : { + [networkName in keyof networks]: NetworkConfig; + }; -type GetBlockFilter< - networks, - /// - allNetworkNames extends string = [keyof networks] extends [never] - ? string - : keyof networks & string, -> = BlockFilterConfig & { - network: - | allNetworkNames - | { - [name in allNetworkNames]?: BlockFilterConfig; - }; -}; +// contracts type AbiConfig = { /** Contract application byte interface. */ abi: abi; }; -type TransactionReceiptConfig = { - includeTransactionReceipts?: boolean; -}; - -type FunctionCallConfig = { - /* - * Enable call trace indexing for this contract. 
- * - * - Docs: https://ponder.sh/docs/indexing/call-traces - */ - - includeCallTraces?: boolean; -}; - -type GetNetwork< +type GetContractNetwork< networks, contract, abi extends Abi, @@ -146,7 +172,7 @@ type GetNetwork< type ContractConfig = Prettify< AbiConfig & - GetNetwork, abi> & + GetContractNetwork, abi> & GetAddress> & GetEventFilter> & TransactionReceiptConfig & @@ -169,12 +195,84 @@ type ContractsConfig = {} extends contracts [name in keyof contracts]: GetContract; }; -type NetworksConfig = {} extends networks +// accounts + +type GetAccountNetwork< + networks, + account, + /// + allNetworkNames extends string = [keyof networks] extends [never] + ? string + : keyof networks & string, +> = account extends { network: infer network } + ? { + /** + * Network that this account is deployed to. Must match a network name in `networks`. + * Any filter information overrides the values in the higher level "accounts" property. + * Factories cannot override an address and vice versa. + */ + network: + | allNetworkNames + | { + [name in allNetworkNames]?: Prettify< + GetAddress> & + TransactionReceiptConfig & + BlockConfig + >; + }; + } + : { + /** + * Network that this account is deployed to. Must match a network name in `networks`. + * Any filter information overrides the values in the higher level "accounts" property. + * Factories cannot override an address and vice versa. + */ + network: + | allNetworkNames + | { + [name in allNetworkNames]?: Prettify< + GetAddress & TransactionReceiptConfig & BlockConfig + >; + }; + }; + +type AccountConfig = Prettify< + GetAccountNetwork> & + GetAddress> & + TransactionReceiptConfig & + BlockConfig +>; + +type AccountsConfig = {} extends accounts ? {} : { - [networkName in keyof networks]: NetworkConfig; + [name in keyof accounts]: AccountConfig; }; +// blocks + +type BlockFilterConfig = { + /** Block number at which to start indexing events (inclusive). If `undefined`, events will be processed from block 0. Default: `undefined`. */ + startBlock?: number; + /** Block number at which to stop indexing events (inclusive). If `undefined`, events will be processed in real-time. Default: `undefined`. */ + endBlock?: number; + interval?: number; +}; + +type GetBlockFilter< + networks, + /// + allNetworkNames extends string = [keyof networks] extends [never] + ? string + : keyof networks & string, +> = BlockFilterConfig & { + network: + | allNetworkNames + | { + [name in allNetworkNames]?: BlockFilterConfig; + }; +}; + type BlockFiltersConfig< networks = unknown, blocks = unknown, @@ -183,32 +281,3 @@ type BlockFiltersConfig< : { [name in keyof blocks]: GetBlockFilter; }; - -export const createConfig = < - const networks, - const contracts = {}, - const blocks = {}, ->(config: { - // TODO: add jsdoc to these properties. 
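As a reading aid for the account typings above: an account can name a single network or provide per-network overrides, mirroring contracts. A hypothetical config exercising the override form (network names, env vars, addresses, and block numbers are made up):

```ts
import { createConfig } from "@ponder/core";
import { http } from "viem";

export default createConfig({
  networks: {
    mainnet: { chainId: 1, transport: http(process.env.PONDER_RPC_URL_1) },
    optimism: { chainId: 10, transport: http(process.env.PONDER_RPC_URL_10) },
  },
  accounts: {
    // Hypothetical account indexed on two networks with different start blocks.
    treasury: {
      address: "0x0000000000000000000000000000000000000001",
      startBlock: 17_000_000,
      network: {
        mainnet: {},
        optimism: { startBlock: 105_000_000 },
      },
    },
  },
});
```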
- networks: NetworksConfig>; - contracts?: ContractsConfig>; - database?: DatabaseConfig; - blocks?: BlockFiltersConfig; -}): CreateConfigReturnType => - config as Prettify>; - -export type Config = { - networks: { [networkName: string]: NetworkConfig }; - contracts: { [contractName: string]: GetContract }; - database?: DatabaseConfig; - blocks: { - [sourceName: string]: GetBlockFilter; - }; -}; - -export type CreateConfigReturnType = { - networks: networks; - contracts: contracts; - database?: DatabaseConfig; - blocks: blocks; -}; diff --git a/packages/core/src/indexing/service.test.ts b/packages/core/src/indexing/service.test.ts index e46d9c038..0d7a2105f 100644 --- a/packages/core/src/indexing/service.test.ts +++ b/packages/core/src/indexing/service.test.ts @@ -1,4 +1,4 @@ -import { BOB } from "@/_test/constants.js"; +import { ALICE, BOB } from "@/_test/constants.js"; import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, @@ -6,20 +6,17 @@ import { setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { getEventsBlock, getEventsLog, getEventsTrace } from "@/_test/utils.js"; +import { deployErc20, mintErc20 } from "@/_test/simulate.js"; +import { getErc20ConfigAndIndexingFunctions } from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import { onchainTable } from "@/drizzle/index.js"; import { createSync } from "@/sync/index.js"; import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; import { promiseWithResolvers } from "@ponder/common"; -import { - type Address, - checksumAddress, - parseEther, - toHex, - zeroAddress, -} from "viem"; +import { checksumAddress, padHex, parseEther, toHex, zeroAddress } from "viem"; +import { encodeEventTopics } from "viem/utils"; import { beforeEach, expect, test, vi } from "vitest"; -import { decodeEvents } from "../sync/events.js"; +import { type RawEvent, decodeEvents } from "../sync/events.js"; import { type Context, create, @@ -40,8 +37,20 @@ const account = onchainTable("account", (p) => ({ const schema = { account }; +const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address: zeroAddress, +}); +const { sources, networks } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, +}); + test("createIndexing()", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -74,7 +83,7 @@ test("createIndexing()", async (context) => { }); test("processSetupEvents() empty", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -111,7 +120,7 @@ test("processSetupEvents() empty", async (context) => { }); test("processSetupEvents()", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -155,23 +164,10 @@ test("processSetupEvents()", async (context) => { contracts: { Erc20: { abi: expect.any(Object), - address: checksumAddress(sources[0].filter.address as Address), - startBlock: sources[0].filter.fromBlock, - endBlock: sources[0].filter.toBlock, - }, - Pair: { - abi: expect.any(Object), - address: undefined, - 
startBlock: sources[1].filter.fromBlock, - endBlock: sources[1].filter.toBlock, - }, - Factory: { - abi: expect.any(Object), - address: checksumAddress( - sources[2].filter.toAddress.address as Address, - ), - startBlock: sources[2].filter.fromBlock, - endBlock: sources[2].filter.toBlock, + // @ts-ignore + address: checksumAddress(sources[0]!.filter.address), + startBlock: sources[0]!.filter.fromBlock, + endBlock: sources[0]!.filter.toBlock, }, }, client: expect.any(Object), @@ -182,8 +178,8 @@ test("processSetupEvents()", async (context) => { await cleanup(); }); -test("processEvent() log events", async (context) => { - const { common, sources, networks } = context; +test("processEvent()", async (context) => { + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -215,8 +211,31 @@ test("processEvent() log events", async (context) => { setIndexingStore(indexingService, indexingStore); - const rawEvents = await getEventsLog(sources); - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); @@ -226,7 +245,7 @@ test("processEvent() log events", async (context) => { indexingFunctions[ "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)" ], - ).toHaveBeenCalledTimes(2); + ).toHaveBeenCalledTimes(1); expect( indexingFunctions[ "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)" @@ -238,30 +257,17 @@ test("processEvent() log events", async (context) => { log: expect.any(Object), block: expect.any(Object), transaction: expect.any(Object), - transactionReceipt: expect.any(Object), + transactionReceipt: undefined, }, context: { network: { chainId: 1, name: "mainnet" }, contracts: { Erc20: { abi: expect.any(Object), - address: checksumAddress(sources[0].filter.address as Address), - startBlock: sources[0].filter.fromBlock, - endBlock: sources[0].filter.toBlock, - }, - Pair: { - abi: expect.any(Object), - address: undefined, - startBlock: sources[1].filter.fromBlock, - endBlock: sources[1].filter.toBlock, - }, - Factory: { - abi: expect.any(Object), - address: checksumAddress( - sources[2].filter.toAddress.address as Address, - ), - startBlock: sources[2].filter.fromBlock, - endBlock: sources[2].filter.toBlock, + // @ts-ignore + address: checksumAddress(sources[0]!.filter.address), + startBlock: sources[0]!.filter.fromBlock, + endBlock: sources[0]!.filter.toBlock, }, }, client: expect.any(Object), @@ -272,8 +278,8 @@ test("processEvent() log events", async (context) => { await cleanup(); }); -test("processEvents() block events", async (context) => { - const { common, sources, networks } = context; +test("processEvents killed", async (context) => { + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -290,7 +296,9 @@ test("processEvents() block events", async (context) => { }); const indexingFunctions = { - "OddBlocks:block": vi.fn(), + 
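The tests above rebuild the same synthetic Transfer event several times; a hypothetical helper (the name `makeTransferRawEvent` is mine, not in the diff) capturing that pattern:

```ts
import { padHex, parseEther, toHex, zeroAddress } from "viem";
import { encodeEventTopics } from "viem/utils";
import { erc20ABI } from "@/_test/generated.js";
import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js";
import type { RawEvent } from "../sync/events.js";

// Builds a synthetic ERC-20 Transfer raw event that decodeEvents can consume,
// mirroring the inline setup repeated throughout the tests above.
const makeTransferRawEvent = (
  to: `0x${string}`,
  amount = parseEther("1"),
): RawEvent =>
  ({
    chainId: 1,
    sourceIndex: 0,
    checkpoint: encodeCheckpoint(zeroCheckpoint),
    block: {} as RawEvent["block"],
    transaction: {} as RawEvent["transaction"],
    log: {
      id: "test",
      data: padHex(toHex(amount), { size: 32 }),
      topics: encodeEventTopics({
        abi: erc20ABI,
        eventName: "Transfer",
        args: { from: zeroAddress, to },
      }),
    },
  }) as RawEvent;

// Usage in a test body:
// const events = decodeEvents(common, sources, [makeTransferRawEvent(ALICE)]);
```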
"Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": + vi.fn(), + "Pair:Swap": vi.fn(), }; const indexingService = create({ @@ -302,167 +310,33 @@ test("processEvents() block events", async (context) => { }); setIndexingStore(indexingService, indexingStore); + kill(indexingService); - const rawEvents = await getEventsBlock(sources); - const events = decodeEvents(common, sources, rawEvents); - const result = await processEvents(indexingService, { - events, - }); - expect(result).toStrictEqual({ status: "success" }); - - expect(indexingFunctions["OddBlocks:block"]).toHaveBeenCalledTimes(1); - expect(indexingFunctions["OddBlocks:block"]).toHaveBeenCalledWith({ - event: { - block: expect.any(Object), - }, - context: { - network: { chainId: 1, name: "mainnet" }, - contracts: { - Erc20: { - abi: expect.any(Object), - address: checksumAddress(sources[0].filter.address as Address), - startBlock: sources[0].filter.fromBlock, - endBlock: sources[0].filter.toBlock, - }, - Pair: { - abi: expect.any(Object), - address: undefined, - startBlock: sources[1].filter.fromBlock, - endBlock: sources[1].filter.toBlock, - }, - Factory: { - abi: expect.any(Object), - address: checksumAddress( - sources[2].filter.toAddress.address as Address, - ), - startBlock: sources[2].filter.fromBlock, - endBlock: sources[2].filter.toBlock, - }, - }, - client: expect.any(Object), - db: expect.any(Object), + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, }, }); - await cleanup(); -}); - -test("processEvents() call trace events", async (context) => { - const { common, sources, networks } = context; - const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( - context, - { schema }, - ); - - const sync = await createSync({ - common, - syncStore, - networks, - sources, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - - const indexingFunctions = { - "Factory.createPair()": vi.fn(), - }; + const data = padHex(toHex(parseEther("1")), { size: 32 }); - const indexingService = create({ - indexingFunctions, - common, - sources, - networks, - sync, - }); - - setIndexingStore(indexingService, indexingStore); - - const rawEvents = await getEventsTrace(sources); - const events = decodeEvents(common, sources, rawEvents); - const result = await processEvents(indexingService, { - events, - }); - expect(result).toStrictEqual({ status: "success" }); - - expect(indexingFunctions["Factory.createPair()"]).toHaveBeenCalledTimes(1); - expect(indexingFunctions["Factory.createPair()"]).toHaveBeenCalledWith({ - event: { - args: undefined, - result: expect.any(String), - block: expect.any(Object), - trace: expect.any(Object), - transaction: expect.any(Object), - transactionReceipt: expect.any(Object), + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, }, - context: { - network: { chainId: 1, name: "mainnet" }, - contracts: { - Erc20: { - abi: expect.any(Object), - address: checksumAddress(sources[0].filter.address as Address), - startBlock: sources[0].filter.fromBlock, - endBlock: sources[0].filter.toBlock, - }, - Pair: { - abi: expect.any(Object), - address: undefined, - startBlock: sources[1].filter.fromBlock, - endBlock: sources[1].filter.toBlock, - }, - Factory: { - abi: 
expect.any(Object), - address: checksumAddress( - sources[2].filter.toAddress.address as Address, - ), - startBlock: sources[2].filter.fromBlock, - endBlock: sources[2].filter.toBlock, - }, - }, - client: expect.any(Object), - db: expect.any(Object), - }, - }); - - await cleanup(); -}); - -test("processEvents killed", async (context) => { - const { common, sources, networks } = context; - const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( - context, - { schema }, - ); - - const sync = await createSync({ - common, - syncStore, - networks, - sources, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); + } as RawEvent; - const indexingFunctions = { - "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": - vi.fn(), - "Pair:Swap": vi.fn(), - }; - - const indexingService = create({ - indexingFunctions, - common, - sources, - networks, - sync, - }); - - setIndexingStore(indexingService, indexingStore); - kill(indexingService); - - const rawEvents = await getEventsLog(sources); - const events = decodeEvents(common, sources, rawEvents); + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); @@ -478,7 +352,7 @@ test("processEvents killed", async (context) => { }); test("processEvents eventCount", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -497,7 +371,6 @@ test("processEvents eventCount", async (context) => { const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), - "Pair:Swap": vi.fn(), }; const indexingService = create({ @@ -510,23 +383,45 @@ test("processEvents eventCount", async (context) => { setIndexingStore(indexingService, indexingStore); - const rawEvents = await getEventsLog(sources); - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); expect(result).toStrictEqual({ status: "success" }); expect(indexingService.eventCount).toStrictEqual({ - "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": 2, - "Pair:Swap": 1, + "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": 1, }); await cleanup(); }); test("executeSetup() context.client", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -581,7 +476,7 @@ test("executeSetup() context.client", async (context) => { }); test("executeSetup() context.db", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -636,7 +531,7 @@ test("executeSetup() 
context.db", async (context) => { }); test("executeSetup() metrics", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -677,7 +572,7 @@ test("executeSetup() metrics", async (context) => { }); test("executeSetup() error", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -721,7 +616,7 @@ test("executeSetup() error", async (context) => { }); test("processEvents() context.client", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -747,9 +642,6 @@ test("processEvents() context.client", async (context) => { indexingFunctions: { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": clientCall, - "Pair:Swap": clientCall, - "OddBlocks:block": clientCall, - "Factory.createPair()": clientCall, }, common, sources, @@ -764,18 +656,37 @@ test("processEvents() context.client", async (context) => { "getBalance", ); - const rawEvents = [ - ...(await getEventsLog(sources)), - ...(await getEventsBlock(sources)), - ...(await getEventsTrace(sources)), - ]; - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); expect(result).toStrictEqual({ status: "success" }); - expect(getBalanceSpy).toHaveBeenCalledTimes(5); + expect(getBalanceSpy).toHaveBeenCalledTimes(1); expect(getBalanceSpy).toHaveBeenCalledWith({ address: BOB, }); @@ -784,7 +695,7 @@ test("processEvents() context.client", async (context) => { }); test("processEvents() context.db", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema, instanceId: "1234" }, @@ -813,9 +724,6 @@ test("processEvents() context.db", async (context) => { indexingFunctions: { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": dbCall, - "Pair:Swap": dbCall, - "OddBlocks:block": dbCall, - "Factory.createPair()": dbCall, }, common, sources, @@ -827,28 +735,47 @@ test("processEvents() context.db", async (context) => { const insertSpy = vi.spyOn(indexingService.currentEvent.context.db, "insert"); - const rawEvents = [ - ...(await getEventsLog(sources)), - ...(await getEventsBlock(sources)), - ...(await getEventsTrace(sources)), - ]; - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} 
as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); expect(result).toStrictEqual({ status: "success" }); - expect(insertSpy).toHaveBeenCalledTimes(5); + expect(insertSpy).toHaveBeenCalledTimes(1); const transferEvents = await indexingStore.sql.select().from(account); - expect(transferEvents).toHaveLength(5); + expect(transferEvents).toHaveLength(1); await cleanup(); }); test("processEvents() metrics", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -868,9 +795,6 @@ test("processEvents() metrics", async (context) => { indexingFunctions: { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), - "Pair:Swap": vi.fn(), - "OddBlocks:block": vi.fn(), - "Factory.createPair()": vi.fn(), }, common, sources, @@ -880,12 +804,31 @@ test("processEvents() metrics", async (context) => { setIndexingStore(indexingService, indexingStore); - const rawEvents = [ - ...(await getEventsLog(sources)), - ...(await getEventsBlock(sources)), - ...(await getEventsTrace(sources)), - ]; - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); await processEvents(indexingService, { events, }); @@ -897,7 +840,7 @@ test("processEvents() metrics", async (context) => { }); test("processEvents() error", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -916,9 +859,6 @@ test("processEvents() error", async (context) => { const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), - "Pair:Swap": vi.fn(), - "OddBlocks:block": vi.fn(), - "Factory.createPair()": vi.fn(), }; const indexingService = create({ @@ -935,12 +875,31 @@ test("processEvents() error", async (context) => { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)" ].mockRejectedValue(new Error()); - const rawEvents = [ - ...(await getEventsLog(sources)), - ...(await getEventsBlock(sources)), - ...(await getEventsTrace(sources)), - ]; - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const result = await processEvents(indexingService, { events, }); @@ -954,15 
+913,12 @@ test("processEvents() error", async (context) => { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)" ], ).toHaveBeenCalledTimes(1); - expect(indexingFunctions["Pair:Swap"]).toHaveBeenCalledTimes(0); - expect(indexingFunctions["OddBlocks:block"]).toHaveBeenCalledTimes(0); - expect(indexingFunctions["Factory.createPair()"]).toHaveBeenCalledTimes(0); await cleanup(); }); test("execute() error after killed", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -994,8 +950,31 @@ test("execute() error after killed", async (context) => { setIndexingStore(indexingService, indexingStore); - const rawEvents = await getEventsLog(sources); - const events = decodeEvents(common, sources, rawEvents); + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]); const resultPromise = processEvents(indexingService, { events }); kill(indexingService); @@ -1008,7 +987,7 @@ test("execute() error after killed", async (context) => { }); test("ponderActions getBalance()", async (context) => { - const { common, sources, networks } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, @@ -1044,12 +1023,14 @@ test("ponderActions getBalance()", async (context) => { }); test("ponderActions getCode()", async (context) => { - const { common, sources, networks, erc20 } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, ); + const { address } = await deployErc20({ sender: ALICE }); + const sync = await createSync({ common, syncStore, @@ -1071,7 +1052,7 @@ test("ponderActions getCode()", async (context) => { setIndexingStore(indexingService, indexingStore); const bytecode = await indexingService.clientByChainId[1]!.getCode({ - address: erc20.address, + address, }); expect(bytecode).toBeTruthy(); @@ -1080,12 +1061,20 @@ test("ponderActions getCode()", async (context) => { }); test("ponderActions getStorageAt()", async (context) => { - const { common, sources, networks, erc20 } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, ); + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const sync = await createSync({ common, syncStore, @@ -1107,7 +1096,7 @@ test("ponderActions getStorageAt()", async (context) => { setIndexingStore(indexingService, indexingStore); const storage = await indexingService.clientByChainId[1]!.getStorageAt({ - address: erc20.address, + address, // totalSupply is in the third storage slot slot: toHex(2), }); @@ -1118,12 +1107,20 @@ test("ponderActions getStorageAt()", async (context) => { }); test("ponderActions readContract()", async (context) => { - const { common, sources, networks, erc20 } = context; + const { common } = 
context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, ); + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const sync = await createSync({ common, syncStore, @@ -1147,7 +1144,7 @@ test("ponderActions readContract()", async (context) => { const totalSupply = await indexingService.clientByChainId[1]!.readContract({ abi: erc20ABI, functionName: "totalSupply", - address: erc20.address, + address, }); expect(totalSupply).toBe(parseEther("1")); @@ -1156,12 +1153,20 @@ test("ponderActions readContract()", async (context) => { }); test("ponderActions readContract() blockNumber", async (context) => { - const { common, sources, networks, erc20 } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, ); + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const sync = await createSync({ common, syncStore, @@ -1185,7 +1190,7 @@ test("ponderActions readContract() blockNumber", async (context) => { const totalSupply = await indexingService.clientByChainId[1]!.readContract({ abi: erc20ABI, functionName: "totalSupply", - address: erc20.address, + address, blockNumber: 1n, }); @@ -1196,12 +1201,20 @@ test("ponderActions readContract() blockNumber", async (context) => { // Note: Kyle the local chain doesn't have a deployed instance of "multicall3" test.skip("ponderActions multicall()", async (context) => { - const { common, sources, networks, erc20 } = context; + const { common } = context; const { syncStore, indexingStore, cleanup } = await setupDatabaseServices( context, { schema }, ); + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const sync = await createSync({ common, syncStore, @@ -1228,7 +1241,7 @@ test.skip("ponderActions multicall()", async (context) => { { abi: erc20ABI, functionName: "totalSupply", - address: erc20.address, + address, }, ], }); diff --git a/packages/core/src/indexing/service.ts b/packages/core/src/indexing/service.ts index 501d09220..267bc7713 100644 --- a/packages/core/src/indexing/service.ts +++ b/packages/core/src/indexing/service.ts @@ -16,18 +16,11 @@ import { encodeCheckpoint, zeroCheckpoint, } from "@/utils/checkpoint.js"; -import { never } from "@/utils/never.js"; import { prettyPrint } from "@/utils/print.js"; import { startClock } from "@/utils/timer.js"; import type { Abi, Address } from "viem"; import { checksumAddress, createClient } from "viem"; -import type { - BlockEvent, - CallTraceEvent, - Event, - LogEvent, - SetupEvent, -} from "../sync/events.js"; +import type { Event, SetupEvent } from "../sync/events.js"; import { addStackTrace } from "./addStackTrace.js"; import { type ReadOnlyClient, getPonderActions } from "./ponderActions.js"; @@ -40,7 +33,7 @@ export type Context = { { abi: Abi; address?: Address | readonly Address[]; - startBlock: number; + startBlock?: number; endBlock?: number; } >; @@ -105,7 +98,7 @@ export const create = ({ // build contractsByChainId for (const source of sources) { - if (source.type === "block") continue; + if (source.type === "block" || source.type === "account") continue; let address: Address | undefined; @@ -216,12 +209,12 @@ export const processSetupEvents = 
async ( checkpoint: encodeCheckpoint({ ...zeroCheckpoint, chainId: BigInt(network.chainId), - blockNumber: BigInt(source.filter.fromBlock), + blockNumber: BigInt(source.filter.fromBlock ?? 0), }), name: eventName, - block: BigInt(source.filter.fromBlock), + block: BigInt(source.filter.fromBlock ?? 0), }, }); @@ -247,74 +240,23 @@ export const processEvents = async ( const event = events[i]!; - switch (event.type) { - case "log": { - indexingService.eventCount[event.name]!++; + indexingService.eventCount[event.name]!++; - indexingService.common.logger.trace({ - service: "indexing", - msg: `Started indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - const result = await executeLog(indexingService, { event }); - if (result.status !== "success") { - return result; - } - - indexingService.common.logger.trace({ - service: "indexing", - msg: `Completed indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - break; - } - - case "block": { - indexingService.eventCount[event.name]!++; - - indexingService.common.logger.trace({ - service: "indexing", - msg: `Started indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - const result = await executeBlock(indexingService, { event }); - if (result.status !== "success") { - return result; - } - - indexingService.common.logger.trace({ - service: "indexing", - msg: `Completed indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - break; - } - - case "callTrace": { - indexingService.eventCount[event.name]!++; - - indexingService.common.logger.trace({ - service: "indexing", - msg: `Started indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - const result = await executeCallTrace(indexingService, { event }); - if (result.status !== "success") { - return result; - } - - indexingService.common.logger.trace({ - service: "indexing", - msg: `Completed indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, - }); - - break; - } + indexingService.common.logger.trace({ + service: "indexing", + msg: `Started indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, + }); - default: - never(event); + const result = await executeEvent(indexingService, { event }); + if (result.status !== "success") { + return result; } + indexingService.common.logger.trace({ + service: "indexing", + msg: `Completed indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, + }); + // periodically update metrics if (i % 93 === 0) { updateCompletedEvents(indexingService); @@ -455,143 +397,37 @@ const executeSetup = async ( return { status: "success" }; }; -const executeLog = async ( - indexingService: Service, - { event }: { event: LogEvent }, -): Promise< - | { status: "error"; error: Error } - | { status: "success" } - | { status: "killed" } -> => { - const { - common, - indexingFunctions, - currentEvent, - networkByChainId, - contractsByChainId, - clientByChainId, - } = indexingService; - const indexingFunction = indexingFunctions[event.name]; - const metricLabel = { event: event.name }; - - try { - // set currentEvent - currentEvent.context.network.chainId = event.chainId; - currentEvent.context.network.name = networkByChainId[event.chainId]!.name; - currentEvent.context.client = clientByChainId[event.chainId]!; - currentEvent.context.contracts = contractsByChainId[event.chainId]!; - currentEvent.contextState.blockNumber = event.event.block.number; - - const endClock = 
startClock(); - - await indexingFunction!({ - event: event.event, - context: currentEvent.context, - }); - - common.metrics.ponder_indexing_function_duration.observe( - metricLabel, - endClock(), - ); - } catch (_error) { - if (indexingService.isKilled) return { status: "killed" }; - const error = _error as Error & { meta?: string[] }; - - const decodedCheckpoint = decodeCheckpoint(event.checkpoint); - - addStackTrace(error, common.options); - - error.meta = Array.isArray(error.meta) ? error.meta : []; - if (error.meta.length === 0) { - error.meta.push(`Event arguments:\n${prettyPrint(event.event.args)}`); +const toErrorMeta = (event: Event) => { + switch (event.type) { + case "log": + case "trace": { + return `Event arguments:\n${prettyPrint(event.event.args)}`; } - common.logger.error({ - service: "indexing", - msg: `Error while processing '${event.name}' event in '${networkByChainId[event.chainId]!.name}' block ${decodedCheckpoint.blockNumber}`, - error, - }); - - common.metrics.ponder_indexing_has_error.set(1); - - return { status: "error", error }; - } - - return { status: "success" }; -}; - -const executeBlock = async ( - indexingService: Service, - { event }: { event: BlockEvent }, -): Promise< - | { status: "error"; error: Error } - | { status: "success" } - | { status: "killed" } -> => { - const { - common, - indexingFunctions, - currentEvent, - networkByChainId, - contractsByChainId, - clientByChainId, - } = indexingService; - const indexingFunction = indexingFunctions[event.name]; - const metricLabel = { event: event.name }; - - try { - // set currentEvent - currentEvent.context.network.chainId = event.chainId; - currentEvent.context.network.name = networkByChainId[event.chainId]!.name; - currentEvent.context.client = clientByChainId[event.chainId]!; - currentEvent.context.contracts = contractsByChainId[event.chainId]!; - currentEvent.contextState.blockNumber = event.event.block.number; - - const endClock = startClock(); - - await indexingFunction!({ - event: event.event, - context: currentEvent.context, - }); - - common.metrics.ponder_indexing_function_duration.observe( - metricLabel, - endClock(), - ); - } catch (_error) { - if (indexingService.isKilled) return { status: "killed" }; - const error = _error as Error & { meta?: string[] }; - - const decodedCheckpoint = decodeCheckpoint(event.checkpoint); - - addStackTrace(error, common.options); + case "transfer": { + return `Event arguments:\n${prettyPrint(event.event.transfer)}`; + } - error.meta = Array.isArray(error.meta) ? 
error.meta : []; - error.meta.push( - `Block:\n${prettyPrint({ + case "block": { + return `Block:\n${prettyPrint({ hash: event.event.block.hash, number: event.event.block.number, timestamp: event.event.block.timestamp, - })}`, - ); - - common.logger.error({ - service: "indexing", - msg: `Error while processing ${event.name} event at chainId=${decodedCheckpoint.chainId}, block=${decodedCheckpoint.blockNumber}`, - error, - }); - - common.metrics.ponder_indexing_has_error.set(1); + })}`; + } - return { status: "error", error: error }; + case "transaction": { + return `Transaction:\n${prettyPrint({ + hash: event.event.transaction.hash, + block: event.event.block.number, + })}`; + } } - - return { status: "success" }; }; -const executeCallTrace = async ( +const executeEvent = async ( indexingService: Service, - { event }: { event: CallTraceEvent }, + { event }: { event: Event }, ): Promise< | { status: "error"; error: Error } | { status: "success" } @@ -636,7 +472,9 @@ const executeCallTrace = async ( addStackTrace(error, common.options); error.meta = Array.isArray(error.meta) ? error.meta : []; - error.meta.push(`Call trace arguments:\n${prettyPrint(event.event.args)}`); + if (error.meta.length === 0) { + error.meta.push(toErrorMeta(event)); + } common.logger.error({ service: "indexing", @@ -646,7 +484,7 @@ const executeCallTrace = async ( common.metrics.ponder_indexing_has_error.set(1); - return { status: "error", error: error }; + return { status: "error", error }; } return { status: "success" }; diff --git a/packages/core/src/sync-historical/index.test.ts b/packages/core/src/sync-historical/index.test.ts index 2d1bda3c8..952e78081 100644 --- a/packages/core/src/sync-historical/index.test.ts +++ b/packages/core/src/sync-historical/index.test.ts @@ -1,13 +1,32 @@ +import { ALICE, BOB } from "@/_test/constants.js"; +import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, setupCommon, setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { simulateFactoryDeploy, simulatePairSwap } from "@/_test/simulate.js"; -import { getRawRPCData } from "@/_test/utils.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; -import { hexToNumber } from "viem"; +import { + createPair, + deployErc20, + deployFactory, + mintErc20, + swapPair, + transferErc20, + transferEth, +} from "@/_test/simulate.js"; +import { + getAccountsConfigAndIndexingFunctions, + getBlocksConfigAndIndexingFunctions, + getErc20ConfigAndIndexingFunctions, + getNetwork, + getPairWithFactoryConfigAndIndexingFunctions, + testClient, +} from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; +import { createRequestQueue } from "@/utils/requestQueue.js"; +import { encodeFunctionData, encodeFunctionResult, toHex } from "viem"; +import { parseEther } from "viem/utils"; import { beforeEach, expect, test, vi } from "vitest"; import { createHistoricalSync } from "./index.js"; @@ -15,52 +34,33 @@ beforeEach(setupCommon); beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); -// Helper function used to spoof "trace_filter" requests -// because they aren't supported by foundry. 
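// --- Editor's sketch (illustrative, not part of this diff) ---
// The spoofing helper removed below is superseded by inline RPC stubs in the updated
// tests: each test wraps the real request queue and answers unsupported methods locally.
// A minimal version of that pattern, assuming only a `request(body)` method (the type
// and function names here are hypothetical, not Ponder's actual API surface):
type MinimalRequestQueue = {
  request: (body: { method: string; params?: unknown[] }) => Promise<unknown>;
};

const withStubbedTraces = (queue: MinimalRequestQueue): MinimalRequestQueue => ({
  ...queue,
  request: async (body) => {
    // Anvil does not implement `debug_traceBlockByNumber`, so answer it locally
    // and forward every other method to the underlying queue.
    if (body.method === "debug_traceBlockByNumber") return [];
    return queue.request(body);
  },
});
// --- end editor's sketch ---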
-const getRequestQueue = async (requestQueue: RequestQueue) => { - const rpcData = await getRawRPCData(); - - return { - ...requestQueue, - request: (request: any) => { - if (request.method === "trace_filter") { - let traces = [ - ...rpcData.block2.callTraces, - ...rpcData.block3.callTraces, - ...rpcData.block4.callTraces, - ]; - - if (request.params[0].fromBlock !== undefined) { - traces = traces.filter( - (t) => - hexToNumber(t.blockNumber) >= - hexToNumber(request.params[0].fromBlock), - ); - } - if (request.params[0].toBlock) { - traces = traces.filter( - (t) => - hexToNumber(t.blockNumber) <= - hexToNumber(request.params[0].toBlock), - ); - } - - return Promise.resolve(traces); - } - return requestQueue.request(request); - }, - } as RequestQueue; -}; - test("createHistoricalSync()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[0]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); @@ -72,20 +72,46 @@ test("createHistoricalSync()", async (context) => { test("sync() with log filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[0]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 2]); const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); - expect(logs).toHaveLength(2); + expect(logs).toHaveLength(1); const intervals = await database.qb.sync .selectFrom("intervals") @@ -97,25 +123,46 @@ test("sync() with log filter", async (context) => { await cleanup(); }); -test("sync() with log filter and transaction receipts", async (context) => { +test.skip("sync() with log filter and transaction receipts", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - context.sources[0].filter.includeTransactionReceipts = true; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, 
rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeTransactionReceipts: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[0]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); - - const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); - - expect(logs).toHaveLength(2); + await historicalSync.sync([1, 3]); const transactionReceipts = await database.qb.sync .selectFrom("transactionReceipts") @@ -137,16 +184,36 @@ test("sync() with log filter and transaction receipts", async (context) => { test("sync() with block filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 3 }); + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[4]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 3]); const blocks = await database.qb.sync .selectFrom("blocks") @@ -168,16 +235,45 @@ test("sync() with block filter", async (context) => { test("sync() with log factory", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployFactory({ sender: ALICE }); + const { result } = await createPair({ factory: address, sender: ALICE }); + await swapPair({ + pair: result, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[1]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 3]); const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); @@ -196,23 +292,94 @@ test("sync() with log factory", async (context) => { test("sync() with trace filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE 
}); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const { hash } = await transferErc20({ + erc20: address, + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const request = async (request: any) => { + if (request.method === "debug_traceBlockByNumber") { + if (request.params[0] === "0x1") return Promise.resolve([]); + if (request.params[0] === "0x2") return Promise.resolve([]); + if (request.params[0] === "0x3") { + return Promise.resolve([ + { + txHash: hash, + result: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + }, + }, + ]); + } + } + + return requestQueue.request(request); + }; + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[3]], + network, + sources: sources.filter(({ filter }) => filter.type === "trace"), syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue: { + ...requestQueue, + // @ts-ignore + request, + }, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 3]); - const callTraces = await database.qb.sync - .selectFrom("callTraces") + const traces = await database.qb.sync + .selectFrom("traces") .selectAll() .execute(); - expect(callTraces).toHaveLength(4); + expect(traces).toHaveLength(1); const intervals = await database.qb.sync .selectFrom("intervals") @@ -224,28 +391,219 @@ test("sync() with trace filter", async (context) => { await cleanup(); }); +test("sync() with transaction filter", async (context) => { + const { cleanup, syncStore, database } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const historicalSync = await createHistoricalSync({ + common: context.common, + network, + sources: sources.filter(({ filter }) => filter.type === "transaction"), + syncStore, + requestQueue, + onFatalError: () => {}, + }); + + await historicalSync.sync([1, 1]); + + const transactions = await database.qb.sync + .selectFrom("transactions") + .selectAll() + .execute(); + + expect(transactions).toHaveLength(1); + + const transactionReceipts = await database.qb.sync + .selectFrom("transactionReceipts") + .selectAll() + .execute(); + + expect(transactionReceipts).toHaveLength(1); + + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + // transaction:from and transaction:to + expect(intervals).toHaveLength(2); + + await cleanup(); +}); + +test("sync() with transfer filter", async (context) => { + const { cleanup, 
syncStore, database } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const request = async (request: any) => { + if (request.method === "debug_traceBlockByNumber") { + if (request.params[0] === "0x1") { + return Promise.resolve([ + { + txHash: hash, + result: { + type: "CALL", + from: ALICE, + to: BOB, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + output: "0x0", + value: toHex(parseEther("1")), + }, + }, + ]); + } + } + + return requestQueue.request(request); + }; + + const historicalSync = await createHistoricalSync({ + common: context.common, + network, + sources: sources.filter(({ filter }) => filter.type === "transfer"), + syncStore, + requestQueue: { + ...requestQueue, + // @ts-ignore + request, + }, + onFatalError: () => {}, + }); + + await historicalSync.sync([1, 1]); + + const transactions = await database.qb.sync + .selectFrom("transactions") + .selectAll() + .execute(); + + expect(transactions).toHaveLength(1); + + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + // transfer:from and transfer:to + expect(intervals).toHaveLength(2); + + await cleanup(); +}); + test("sync() with many filters", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { sources: erc20Sources } = await buildConfigAndIndexingFunctions({ + ...getErc20ConfigAndIndexingFunctions({ + address, + }), + options: { + ponderDir: "", + rootDir: "", + }, + }); + const { sources: blockSources } = await buildConfigAndIndexingFunctions({ + ...getBlocksConfigAndIndexingFunctions({ + interval: 1, + }), + options: { + ponderDir: "", + rootDir: "", + }, + }); + const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: context.sources, + network, + sources: [...erc20Sources, ...blockSources], syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 2]); const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); - expect(logs).toHaveLength(4); + expect(logs).toHaveLength(1); const blocks = await database.qb.sync .selectFrom("blocks") .selectAll() .execute(); - expect(blocks).toHaveLength(5); + expect(blocks).toHaveLength(2); + + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + expect(intervals).toHaveLength(2); await cleanup(); }); @@ -253,30 +611,57 @@ test("sync() with many filters", async (context) => { test("sync() with cache hit", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + 
network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + let historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[0]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + + await historicalSync.sync([1, 2]); // re-instantiate `historicalSync` to reset the cached intervals - const spy = vi.spyOn(context.requestQueues[0], "request"); + const spy = vi.spyOn(requestQueue, "request"); historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[0]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 2]); expect(spy).toHaveBeenCalledTimes(0); await cleanup(); @@ -285,29 +670,55 @@ test("sync() with cache hit", async (context) => { test("syncBlock() with cache", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - // block 2 and 4 will be requested - const blockFilter = context.sources[4].filter; - blockFilter.offset = 0; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { sources: erc20Sources } = await buildConfigAndIndexingFunctions({ + ...getErc20ConfigAndIndexingFunctions({ + address, + }), + options: { + ponderDir: "", + rootDir: "", + }, + }); + const { sources: blockSources } = await buildConfigAndIndexingFunctions({ + ...getBlocksConfigAndIndexingFunctions({ + interval: 1, + }), + options: { + ponderDir: "", + rootDir: "", + }, + }); const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [ - context.sources[0], - { ...context.sources[4], filter: blockFilter }, - ], + network, + sources: [...erc20Sources, ...blockSources], syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - const spy = vi.spyOn(context.requestQueues[0], "request"); + const spy = vi.spyOn(requestQueue, "request"); - await historicalSync.sync([0, 5]); + await historicalSync.sync([1, 2]); - // 1 call to `syncBlock()` will be cached because - // each source in `sources` matches block 2 - expect(spy).toHaveBeenCalledTimes(4); + // 1 "eth_getLogs" request and only 2 "eth_getBlockByNumber" requests + // because the erc20 and block sources share the block 2 + expect(spy).toHaveBeenCalledTimes(3); await cleanup(); }); @@ -315,28 +726,56 @@ test("syncBlock() with cache", async (context) => { test("syncAddress() handles many addresses", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = 
createRequestQueue({ + network, + common: context.common, + }); + context.common.options.factoryAddressCountThreshold = 10; + const { address } = await deployFactory({ sender: ALICE }); + for (let i = 0; i < 10; i++) { - await simulateFactoryDeploy(context.factory.address); + await createPair({ factory: address, sender: ALICE }); } - const pair = await simulateFactoryDeploy(context.factory.address); - await simulatePairSwap(pair); + const { result } = await createPair({ factory: address, sender: ALICE }); + await swapPair({ + pair: result, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); const historicalSync = await createHistoricalSync({ common: context.common, - network: context.networks[0], - sources: [context.sources[1]], + network, + sources, syncStore, - requestQueue: await getRequestQueue(context.requestQueues[0]), + requestQueue, onFatalError: () => {}, }); - await historicalSync.sync([0, 10 + 5 + 2]); + await historicalSync.sync([1, 13]); const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); - expect(logs).toHaveLength(14); + // 11 pair creations and 1 swap + expect(logs).toHaveLength(12); await cleanup(); }); diff --git a/packages/core/src/sync-historical/index.ts b/packages/core/src/sync-historical/index.ts index 0800a8ee8..c0fae13cd 100644 --- a/packages/core/src/sync-historical/index.ts +++ b/packages/core/src/sync-historical/index.ts @@ -1,29 +1,36 @@ import type { Common } from "@/common/common.js"; import type { Network } from "@/config/networks.js"; +import { + isTraceFilterMatched, + isTransactionFilterMatched, + isTransferFilterMatched, +} from "@/sync-realtime/filter.js"; import type { SyncStore } from "@/sync-store/index.js"; import { type BlockFilter, - type CallTraceFilter, type Factory, type Filter, type LogFactory, type LogFilter, + type TraceFilter, + type TransferFilter, isAddressFactory, } from "@/sync/source.js"; -import type { Source } from "@/sync/source.js"; -import type { SyncBlock, SyncCallTrace, SyncLog } from "@/types/sync.js"; +import type { Source, TransactionFilter } from "@/sync/source.js"; +import type { SyncBlock, SyncLog, SyncTrace } from "@/types/sync.js"; import { type Interval, getChunks, intervalDifference, + intervalRange, } from "@/utils/interval.js"; import { never } from "@/utils/never.js"; import type { RequestQueue } from "@/utils/requestQueue.js"; import { + _debug_traceBlockByNumber, _eth_getBlockByNumber, _eth_getLogs, _eth_getTransactionReceipt, - _trace_filter, } from "@/utils/rpc.js"; import { getLogsRetryHelper } from "@ponder/utils"; import { @@ -63,7 +70,12 @@ export const createHistoricalSync = async ( * Blocks that have already been extracted. * Note: All entries are deleted at the end of each call to `sync()`. */ - const blockCache = new Map>(); + const blockCache = new Map>(); + /** + * Traces that have already been fetched. + * Note: All entries are deleted at the end of each call to `sync()`. + */ + const traceCache = new Map>(); /** * Transactions that should be saved to the sync-store. * Note: All entries are deleted at the end of each call to `sync()`. 
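// --- Editor's sketch (illustrative, not part of this diff) ---
// `blockCache` and `traceCache` above memoize in-flight promises so that two filters
// needing the same block within one `sync()` call trigger only one RPC request. A
// generic, self-contained version of that memoization pattern (names are hypothetical):
const promiseCache = <K, V>(fetch: (key: K) => Promise<V>) => {
  const cache = new Map<K, Promise<V>>();
  return {
    get: (key: K): Promise<V> => {
      // Reuse the pending (or already resolved) promise if the key was seen before.
      if (!cache.has(key)) cache.set(key, fetch(key));
      return cache.get(key)!;
    },
    // Mirrors `blockCache.clear()` / `traceCache.clear()` at the end of each `sync()`.
    clear: () => cache.clear(),
  };
};
// --- end editor's sketch ---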
@@ -103,13 +115,14 @@ export const createHistoricalSync = async ( let latestBlock: SyncBlock | undefined; //////// - // Helper functions for specific sync tasks + // Helper functions for sync tasks //////// /** - * Split "eth_getLogs" requests into ranges inferred from errors. + * Split "eth_getLogs" requests into ranges inferred from errors + * and batch requests. */ - const getLogsDynamic = async ({ + const syncLogsDynamic = async ({ filter, address, interval, @@ -130,20 +143,30 @@ export const createHistoricalSync = async ( : [interval]; const topics = - "eventSelector" in filter ? [filter.eventSelector] : filter.topics; + "eventSelector" in filter + ? [filter.eventSelector] + : [ + filter.topic0 ?? null, + filter.topic1 ?? null, + filter.topic2 ?? null, + filter.topic3 ?? null, + ]; - // Batch large arrays of addresses, handling arrays that are empty or over the threshold + // Batch large arrays of addresses, handling arrays that are empty let addressBatches: (Address | Address[] | undefined)[]; - if (address === undefined || typeof address === "string") { + + if (address === undefined) { + // no address (match all) + addressBatches = [undefined]; + } else if (typeof address === "string") { + // single address addressBatches = [address]; } else if (address.length === 0) { + // no address (factory with no children) return []; - } else if ( - address.length >= args.common.options.factoryAddressCountThreshold - ) { - addressBatches = [undefined]; } else { + // many addresses addressBatches = []; for (let i = 0; i < address.length; i += 50) { addressBatches.push(address.slice(i, i + 50)); @@ -191,7 +214,7 @@ export const createHistoricalSync = async ( : undefined, }); - return getLogsDynamic({ address, interval, filter }); + return syncLogsDynamic({ address, interval, filter }); }), ), ), @@ -214,20 +237,116 @@ export const createHistoricalSync = async ( return logs; }; + /** + * Extract block, using `blockCache` to avoid fetching + * the same block twice. Also, update `latestBlock`. + * + * @param number Block to be extracted + * + * Note: This function could more accurately skip network requests by taking + * advantage of `syncStore.hasBlock` and `syncStore.hasTransaction`. + */ + const syncBlock = async (number: number): Promise => { + let block: SyncBlock; + + /** + * `blockCache` contains all blocks that have been extracted during the + * current call to `sync()`. If `number` is present in `blockCache` use it, + * otherwise, request the block and add it to `blockCache` and the sync-store. + */ + + if (blockCache.has(number)) { + block = await blockCache.get(number)!; + } else { + const _block = _eth_getBlockByNumber(args.requestQueue, { + blockNumber: toHex(number), + }); + blockCache.set(number, _block); + block = await _block; + + // Update `latestBlock` if `block` is closer to tip. + if ( + hexToBigInt(block.number) >= hexToBigInt(latestBlock?.number ?? "0x0") + ) { + latestBlock = block; + } + } + + return block; + }; + + const syncTrace = async (block: number) => { + if (traceCache.has(block)) { + return await traceCache.get(block)!; + } else { + const traces = _debug_traceBlockByNumber(args.requestQueue, { + blockNumber: block, + }); + traceCache.set(block, traces); + return await traces; + } + }; + + /** Extract and insert the log-based addresses that match `filter` + `interval`. 
*/ + const syncLogFactory = async (filter: LogFactory, interval: Interval) => { + const logs = await syncLogsDynamic({ + filter, + interval, + address: filter.address, + }); + + if (isKilled) return; + + // Insert `logs` into the sync-store + await args.syncStore.insertLogs({ + logs: logs.map((log) => ({ log })), + shouldUpdateCheckpoint: false, + chainId: args.network.chainId, + }); + }; + + /** + * Return all addresses that match `filter` after extracting addresses + * that match `filter` and `interval`. Returns `undefined` if the number of + * child addresses is above the limit. + */ + const syncAddressFactory = async ( + filter: Factory, + interval: Interval, + ): Promise => { + await syncLogFactory(filter, interval); + + // Query the sync-store for all addresses that match `filter`. + const addresses = await args.syncStore.getChildAddresses({ + filter, + limit: args.common.options.factoryAddressCountThreshold, + }); + + if (addresses.length === args.common.options.factoryAddressCountThreshold) { + return undefined; + } + + return addresses; + }; + + //////// + // Helper function for filter types + //////// + const syncLogFilter = async (filter: LogFilter, interval: Interval) => { // Resolve `filter.address` const address = isAddressFactory(filter.address) - ? await syncAddress(filter.address, interval) + ? await syncAddressFactory(filter.address, interval) : filter.address; if (isKilled) return; - const logs = await getLogsDynamic({ filter, interval, address }); + const logs = await syncLogsDynamic({ filter, interval, address }); if (isKilled) return; const blocks = await Promise.all( - logs.map((log) => syncBlock(hexToBigInt(log.blockNumber))), + logs.map((log) => syncBlock(hexToNumber(log.blockNumber))), ); // Validate that logs point to the valid transaction hash in the block @@ -266,20 +385,20 @@ export const createHistoricalSync = async ( if (isKilled) return; - if (filter.includeTransactionReceipts) { - const transactionReceipts = await Promise.all( - Array.from(transactionHashes).map((hash) => - _eth_getTransactionReceipt(args.requestQueue, { hash }), - ), - ); + // if (filter.includeTransactionReceipts) { + // const transactionReceipts = await Promise.all( + // Array.from(transactionHashes).map((hash) => + // _eth_getTransactionReceipt(args.requestQueue, { hash }), + // ), + // ); - if (isKilled) return; + // if (isKilled) return; - await args.syncStore.insertTransactionReceipts({ - transactionReceipts, - chainId: args.network.chainId, - }); - } + // await args.syncStore.insertTransactionReceipts({ + // transactionReceipts, + // chainId: args.network.chainId, + // }); + // } }; const syncBlockFilter = async (filter: BlockFilter, interval: Interval) => { @@ -292,180 +411,146 @@ export const createHistoricalSync = async ( requiredBlocks.push(b); } - await Promise.all(requiredBlocks.map((b) => syncBlock(BigInt(b)))); + await Promise.all(requiredBlocks.map((number) => syncBlock(number))); }; - const syncTraceFilter = async ( - filter: CallTraceFilter, + const syncTransactionFilter = async ( + filter: TransactionFilter, interval: Interval, ) => { - // Resolve `filter.toAddress` - let toAddress: Address[] | undefined; - if (isAddressFactory(filter.toAddress)) { - const childAddresses = await syncAddress(filter.toAddress, interval); - if ( - childAddresses.length < args.common.options.factoryAddressCountThreshold - ) { - toAddress = childAddresses; - } else { - toAddress = undefined; - } - } else { - toAddress = filter.toAddress; - } - - if (isKilled) return; - - let callTraces = 
await _trace_filter(args.requestQueue, { - fromAddress: filter.fromAddress, - toAddress, - fromBlock: interval[0], - toBlock: interval[1], - }).then( - (traces) => - traces.flat().filter((t) => t.type === "call") as SyncCallTrace[], - ); + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? await syncAddressFactory(filter.fromAddress, interval).then( + (addresses) => + addresses === undefined ? undefined : new Set(addresses), + ) + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? await syncAddressFactory(filter.toAddress, interval).then( + (addresses) => + addresses === undefined ? undefined : new Set(addresses), + ) + : undefined; if (isKilled) return; const blocks = await Promise.all( - callTraces.map((trace) => syncBlock(hexToBigInt(trace.blockNumber))), + intervalRange(interval).map((number) => syncBlock(number)), ); - const transactionHashes = new Set(callTraces.map((t) => t.transactionHash)); - - // Validate that traces point to the valid transaction hash in the block - for (let i = 0; i < callTraces.length; i++) { - const callTrace = callTraces[i]!; - const block = blocks[i]!; + if (isKilled) return; - if (block.hash !== callTrace.blockHash) { - throw new Error( - `Detected inconsistent RPC responses. 'trace.blockHash' ${callTrace.blockHash} does not match 'block.hash' ${block.hash}`, - ); - } + const transactionHashes: Set = new Set(); + + for (const block of blocks) { + block.transactions.map((transaction) => { + if ( + isTransactionFilterMatched({ + filter, + block, + transaction, + fromChildAddresses, + toChildAddresses, + }) + ) { + transactionHashes.add(transaction.hash); + } + }); + } - if ( - block.transactions.find((t) => t.hash === callTrace.transactionHash) === - undefined - ) { - throw new Error( - `Detected inconsistent RPC responses. 'trace.transactionHash' ${callTrace.transactionHash} not found in 'block.transactions' ${block.hash}`, - ); - } + for (const hash of transactionHashes) { + transactionsCache.add(hash); } - // Request transactionReceipts to check for reverted transactions. + if (isKilled) return; + const transactionReceipts = await Promise.all( Array.from(transactionHashes).map((hash) => - _eth_getTransactionReceipt(args.requestQueue, { - hash, - }), + _eth_getTransactionReceipt(args.requestQueue, { hash }), ), ); - const revertedTransactions = new Set(); - for (const receipt of transactionReceipts) { - if (receipt.status === "0x0") { - revertedTransactions.add(receipt.transactionHash); - } - } - - callTraces = callTraces.filter( - (trace) => revertedTransactions.has(trace.transactionHash) === false, - ); - - if (isKilled) return; - - for (const hash of transactionHashes) { - if (revertedTransactions.has(hash) === false) { - transactionsCache.add(hash); - } - } - if (isKilled) return; - await args.syncStore.insertCallTraces({ - callTraces: callTraces.map((callTrace, i) => ({ - callTrace, - block: blocks[i]!, - })), + await args.syncStore.insertTransactionReceipts({ + transactionReceipts, chainId: args.network.chainId, }); }; - /** Extract and insert the log-based addresses that match `filter` + `interval`. */ - const syncLogFactory = async (filter: LogFactory, interval: Interval) => { - const logs = await getLogsDynamic({ - filter, - interval, - address: filter.address, - }); + const syncTraceOrTransferFilter = async ( + filter: TraceFilter | TransferFilter, + interval: Interval, + ) => { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? 
await syncAddressFactory(filter.fromAddress, interval) + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? await syncAddressFactory(filter.toAddress, interval) + : undefined; + + const traces = await Promise.all( + intervalRange(interval).map(async (number) => { + let traces = await syncTrace(number); + + // remove unmatched traces + traces = traces.filter((trace) => + filter.type === "trace" + ? isTraceFilterMatched({ + filter, + block: { number: toHex(number) }, + trace: trace.trace, + fromChildAddresses: fromChildAddresses + ? new Set(fromChildAddresses) + : undefined, + toChildAddresses: toChildAddresses + ? new Set(toChildAddresses) + : undefined, + }) + : isTransferFilterMatched({ + filter, + block: { number: toHex(number) }, + trace: trace.trace, + fromChildAddresses: fromChildAddresses + ? new Set(fromChildAddresses) + : undefined, + toChildAddresses: toChildAddresses + ? new Set(toChildAddresses) + : undefined, + }), + ); - if (isKilled) return; + if (traces.length === 0) return []; - // Insert `logs` into the sync-store - await args.syncStore.insertLogs({ - logs: logs.map((log) => ({ log })), - shouldUpdateCheckpoint: false, - chainId: args.network.chainId, - }); - }; + const block = await syncBlock(number); - /** - * Extract block, using `blockCache` to avoid fetching - * the same block twice. Also, update `latestBlock`. - * - * @param number Block to be extracted - * @param transactionHashes Hashes to be inserted into the sync-store - * - * Note: This function could more accurately skip network requests by taking - * advantage of `syncStore.hasBlock` and `syncStore.hasTransaction`. - */ - const syncBlock = async (number: bigint): Promise => { - let block: SyncBlock; + return traces.map((trace) => { + const transaction = block.transactions.find( + (t) => t.hash === trace.transactionHash, + ); - /** - * `blockCache` contains all blocks that have been extracted during the - * current call to `sync()`. If `number` is present in `blockCache` use it, - * otherwise, request the block and add it to `blockCache` and the sync-store. - */ + if (transaction === undefined) { + throw new Error( + `Detected inconsistent RPC responses. 'trace.transactionHash' ${trace.transactionHash} not found in 'block.transactions' ${block.hash}`, + ); + } - if (blockCache.has(number)) { - block = await blockCache.get(number)!; - } else { - const _block = _eth_getBlockByNumber(args.requestQueue, { - blockNumber: toHex(number), - }); - blockCache.set(number, _block); - block = await _block; + transactionsCache.add(transaction.hash); - // Update `latestBlock` if `block` is closer to tip. - if ( - hexToBigInt(block.number) >= hexToBigInt(latestBlock?.number ?? "0x0") - ) { - latestBlock = block; - } - } - - return block; - }; + return { trace, transaction, block }; + }); + }), + ).then((traces) => traces.flat()); - /** - * Return all addresses that match `filter` after extracting addresses - * that match `filter` and `interval`. - */ - const syncAddress = async ( - filter: Factory, - interval: Interval, - ): Promise => { - await syncLogFactory(filter, interval); + if (traces.length > 0) { + await args.syncStore.insertTraces({ + traces, + chainId: args.network.chainId, + }); - // Query the sync-store for all addresses that match `filter`. 
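// --- Editor's sketch (illustrative, not part of this diff) ---
// The new `syncAddressFactory` above queries child addresses with a `limit` of
// `factoryAddressCountThreshold` and returns `undefined` once that limit is hit, which
// callers treat as "too many children, do not filter by address". A hypothetical helper
// capturing that sentinel convention:
const capChildAddresses = (
  addresses: readonly string[],
  limit: number,
): string[] | undefined => (addresses.length >= limit ? undefined : [...addresses]);
// e.g. a result set whose length equals `limit` is assumed to be truncated, so the
// caller falls back to an unfiltered request instead of an address list.
// --- end editor's sketch ---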
- return await args.syncStore.getChildAddresses({ - filter, - limit: args.common.options.factoryAddressCountThreshold, - }); + // TODO(kyle) includeTransactionReceipt + } }; return { @@ -475,25 +560,25 @@ export const createHistoricalSync = async ( await Promise.all( args.sources.map(async (source) => { + const filter = source.filter; + // Compute the required interval to sync, accounting for cached // intervals and start + end block. // Skip sync if the interval is after the `toBlock` or before // the `fromBlock`. if ( - source.filter.fromBlock > _interval[1] || - (source.filter.toBlock && source.filter.toBlock < _interval[0]) + (filter.fromBlock !== undefined && + filter.fromBlock > _interval[1]) || + (filter.toBlock !== undefined && filter.toBlock < _interval[0]) ) { return; } const interval: Interval = [ - Math.max(source.filter.fromBlock, _interval[0]), - Math.min( - source.filter.toBlock ?? Number.POSITIVE_INFINITY, - _interval[1], - ), + Math.max(filter.fromBlock ?? 0, _interval[0]), + Math.min(filter.toBlock ?? Number.POSITIVE_INFINITY, _interval[1]), ]; - const completedIntervals = intervalsCache.get(source.filter)!; + const completedIntervals = intervalsCache.get(filter)!; const requiredIntervals = intervalDifference( [interval], completedIntervals, @@ -503,35 +588,36 @@ export const createHistoricalSync = async ( if (requiredIntervals.length === 0) return; // Request last block of interval - const blockPromise = syncBlock(BigInt(interval[1])); + const blockPromise = syncBlock(interval[1]); try { // sync required intervals, account for chunk sizes await Promise.all( requiredIntervals.map(async (interval) => { - if (source.type === "contract") { - const filter = source.filter; - switch (filter.type) { - case "log": { - await syncLogFilter(filter, interval); - break; - } - - case "callTrace": - await Promise.all( - getChunks({ interval, maxChunkSize: 10 }).map( - async (interval) => { - await syncTraceFilter(filter, interval); - }, - ), - ); - break; - - default: - never(filter); + switch (filter.type) { + case "log": { + await syncLogFilter(filter, interval); + break; + } + + case "block": { + await syncBlockFilter(filter, interval); + break; + } + + case "transaction": { + await syncTransactionFilter(filter, interval); + break; } - } else { - await syncBlockFilter(source.filter, interval); + + case "trace": + case "transfer": { + await syncTraceOrTransferFilter(filter, interval); + break; + } + + default: + never(filter); } }), ); @@ -553,7 +639,7 @@ export const createHistoricalSync = async ( await blockPromise; - syncedIntervals.push({ filter: source.filter, interval }); + syncedIntervals.push({ filter, interval }); }), ); @@ -562,8 +648,13 @@ export const createHistoricalSync = async ( await Promise.all([ args.syncStore.insertBlocks({ blocks, chainId: args.network.chainId }), args.syncStore.insertTransactions({ - transactions: blocks.flatMap(({ transactions }) => - transactions.filter(({ hash }) => transactionsCache.has(hash)), + transactions: blocks.flatMap((block) => + block.transactions + .filter(({ hash }) => transactionsCache.has(hash)) + .map((transaction) => ({ + transaction, + block, + })), ), chainId: args.network.chainId, }), @@ -578,6 +669,7 @@ export const createHistoricalSync = async ( } blockCache.clear(); + traceCache.clear(); transactionsCache.clear(); return latestBlock; diff --git a/packages/core/src/sync-realtime/bloom.ts b/packages/core/src/sync-realtime/bloom.ts index 7cfcf770e..d154e99a3 100644 --- a/packages/core/src/sync-realtime/bloom.ts +++ 
b/packages/core/src/sync-realtime/bloom.ts @@ -42,28 +42,28 @@ export function isFilterInBloom({ }): boolean { // Return `false` for out of range blocks if ( - hexToNumber(block.number) < filter.fromBlock || + hexToNumber(block.number) < (filter.fromBlock ?? 0) || hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; } - let isTopicsInBloom: boolean; - let isAddressInBloom: boolean; + const isTopicsInBloom = [ + filter.topic0, + filter.topic1, + filter.topic2, + filter.topic3, + ].every((topic) => { + if (topic === null || topic === undefined) { + return true; + } else if (Array.isArray(topic)) { + return topic.some((t) => isInBloom(block.logsBloom, t)); + } else { + return isInBloom(block.logsBloom, topic); + } + }); - if (filter.topics === undefined || filter.topics.length === 0) { - isTopicsInBloom = true; - } else { - isTopicsInBloom = filter.topics.some((topic) => { - if (topic === null || topic === undefined) { - return true; - } else if (Array.isArray(topic)) { - return topic.some((t) => isInBloom(block.logsBloom, t)); - } else { - return isInBloom(block.logsBloom, topic); - } - }); - } + let isAddressInBloom: boolean; if (filter.address === undefined) isAddressInBloom = true; else if (isAddressFactory(filter.address)) { diff --git a/packages/core/src/sync-realtime/filter.test.ts b/packages/core/src/sync-realtime/filter.test.ts index 284f4a881..c39b603eb 100644 --- a/packages/core/src/sync-realtime/filter.test.ts +++ b/packages/core/src/sync-realtime/filter.test.ts @@ -1,116 +1,423 @@ +import { ALICE, BOB } from "@/_test/constants.js"; +import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, setupCommon } from "@/_test/setup.js"; -import { getRawRPCData } from "@/_test/utils.js"; -import type { Address } from "viem"; +import { + createPair, + deployErc20, + deployFactory, + mintErc20, + transferErc20, + transferEth, +} from "@/_test/simulate.js"; +import { + getAccountsConfigAndIndexingFunctions, + getBlocksConfigAndIndexingFunctions, + getErc20ConfigAndIndexingFunctions, + getNetwork, + getPairWithFactoryConfigAndIndexingFunctions, +} from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; +import type { + BlockFilter, + LogFactory, + LogFilter, + TraceFilter, + TransactionFilter, + TransferFilter, +} from "@/sync/source.js"; +import type { SyncTrace } from "@/types/sync.js"; +import { createRequestQueue } from "@/utils/requestQueue.js"; +import { _eth_getBlockByNumber, _eth_getLogs } from "@/utils/rpc.js"; +import { + type Address, + encodeFunctionData, + encodeFunctionResult, + parseEther, + zeroAddress, + zeroHash, +} from "viem"; import { beforeEach, expect, test } from "vitest"; import { isBlockFilterMatched, - isCallTraceFilterMatched, isLogFactoryMatched, isLogFilterMatched, + isTraceFilterMatched, + isTransactionFilterMatched, + isTransferFilterMatched, } from "./filter.js"; beforeEach(setupCommon); beforeEach(setupAnvil); test("isLogFactoryMatched()", async (context) => { - const rpcData = await getRawRPCData(); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); - let isMatched = isLogFactoryMatched({ - filter: context.sources[1].filter.address, - log: rpcData.block3.logs[0], + const { address } = await deployFactory({ sender: ALICE }); + await createPair({ + factory: address, + sender: ALICE, }); - expect(isMatched).toBe(true); - isMatched = isLogFactoryMatched({ - filter: { - 
...context.sources[1].filter.address, - address: [context.sources[1].filter.address.address as Address], + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", }, - log: rpcData.block3.logs[0], }); - expect(isMatched).toBe(true); - isMatched = isLogFactoryMatched({ - filter: context.sources[1].filter.address, - log: rpcData.block2.logs[0], + const filter = sources[0]!.filter as LogFilter; + + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, }); - expect(isMatched).toBe(false); + + let isMatched = isLogFactoryMatched({ + filter: filter.address, + log: rpcLogs[0]!, + }); + expect(isMatched).toBe(true); + + filter.address.address = [filter.address.address as Address]; isMatched = isLogFactoryMatched({ - filter: context.sources[2].filter.toAddress, - log: rpcData.block3.logs[0], + filter: filter.address, + log: rpcLogs[0]!, }); expect(isMatched).toBe(true); + rpcLogs[0]!.topics[0] = zeroHash; + isMatched = isLogFactoryMatched({ - filter: context.sources[2].filter.toAddress, - log: rpcData.block2.logs[0], + filter: filter.address, + log: rpcLogs[0]!, }); expect(isMatched).toBe(false); }); -test("isLogFilterMatched", async (context) => { - const rpcData = await getRawRPCData(); +test("isLogFilterMatched()", async (context) => { + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const filter = sources[0]!.filter as LogFilter; + + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); let isMatched = isLogFilterMatched({ - filter: context.sources[0].filter, - block: rpcData.block2.block, - log: rpcData.block2.logs[1], + filter, + block: rpcBlock, + log: rpcLogs[0]!, }); expect(isMatched).toBe(true); + filter.topic0 = null; + isMatched = isLogFilterMatched({ - filter: context.sources[1].filter, - block: rpcData.block4.block, - log: rpcData.block4.logs[0], + filter, + block: rpcBlock, + log: rpcLogs[0]!, }); expect(isMatched).toBe(true); + rpcLogs[0]!.address = zeroAddress; + isMatched = isLogFilterMatched({ - filter: context.sources[0].filter, - block: rpcData.block4.block, - log: rpcData.block4.logs[0], + filter, + block: rpcBlock, + log: rpcLogs[0]!, }); expect(isMatched).toBe(false); }); -test("isCallTraceFilterMatched", async (context) => { - const rpcData = await getRawRPCData(); +test("isBlockFilterMatched", async (context) => { + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const filter = sources[0]!.filter as BlockFilter; + + const rpcBlock 
= await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); - let isMatched = isCallTraceFilterMatched({ - filter: context.sources[3].filter, - block: rpcData.block3.block, - callTrace: rpcData.block3.callTraces[0], + let isMatched = isBlockFilterMatched({ + filter, + block: rpcBlock, }); expect(isMatched).toBe(true); - isMatched = isCallTraceFilterMatched({ - filter: context.sources[2].filter, - block: rpcData.block3.block, - callTrace: rpcData.block3.callTraces[0], + filter.interval = 2; + filter.offset = 1; + + isMatched = isBlockFilterMatched({ + filter, + block: rpcBlock, + }); + expect(isMatched).toBe(false); +}); + +test("isTransactionFilterMatched()", async (context) => { + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // transaction:from + const filter = sources[1]!.filter as TransactionFilter; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + let isMatched = isTransactionFilterMatched({ + filter, + block: rpcBlock, + transaction: rpcBlock.transactions[0]!, }); expect(isMatched).toBe(true); - isMatched = isCallTraceFilterMatched({ - filter: context.sources[3].filter, - block: rpcData.block2.block, - callTrace: rpcData.block2.callTraces[0], + rpcBlock.transactions[0]!.from = zeroAddress; + + isMatched = isTransactionFilterMatched({ + filter, + block: rpcBlock, + transaction: rpcBlock.transactions[0]!, }); expect(isMatched).toBe(false); }); -test("isBlockFilterMatched", async (context) => { - const rpcData = await getRawRPCData(); +test("isTransferFilterMatched()", async (context) => { + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); - let isMatched = isBlockFilterMatched({ - filter: context.sources[4].filter, - block: rpcData.block2.block, + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // transfer:from + const filter = sources[3]!.filter as TransferFilter; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: BOB, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + output: "0x0", + value: rpcBlock.transactions[0]!.value, + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + let isMatched = isTransferFilterMatched({ + filter, + block: rpcBlock, + trace: rpcTrace.trace, + }); + expect(isMatched).toBe(true); + + rpcTrace.trace.value = "0x0"; + + isMatched = isTransferFilterMatched({ + filter, + block: rpcBlock, + trace: rpcTrace.trace, }); expect(isMatched).toBe(false); +}); - isMatched = isBlockFilterMatched({ - filter: context.sources[4].filter, - block: rpcData.block3.block, +test("isTraceFilterMatched()", async (context) => { + const network = getNetwork(); + const requestQueue = 
createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const { hash } = await transferErc20({ + erc20: address, + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeCallTraces: true, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const filter = sources[1]!.filter as TraceFilter; + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + + let isMatched = isTraceFilterMatched({ + filter, + block: rpcBlock, + trace: rpcTrace.trace, }); expect(isMatched).toBe(true); + + filter.functionSelector = undefined; + + isMatched = isTraceFilterMatched({ + filter, + block: rpcBlock, + trace: rpcTrace.trace, + }); + expect(isMatched).toBe(true); + + rpcTrace.trace.to = zeroAddress; + + isMatched = isTraceFilterMatched({ + filter, + block: rpcBlock, + trace: rpcTrace.trace, + }); + expect(isMatched).toBe(false); }); diff --git a/packages/core/src/sync-realtime/filter.ts b/packages/core/src/sync-realtime/filter.ts index 281cb432a..9862790ad 100644 --- a/packages/core/src/sync-realtime/filter.ts +++ b/packages/core/src/sync-realtime/filter.ts @@ -1,16 +1,23 @@ import { type BlockFilter, - type CallTraceFilter, type LogFactory, type LogFilter, + type TraceFilter, + type TransactionFilter, + type TransferFilter, isAddressFactory, } from "@/sync/source.js"; -import type { SyncBlock, SyncCallTrace, SyncLog } from "@/types/sync.js"; +import type { + SyncBlock, + SyncLog, + SyncTrace, + SyncTransaction, +} from "@/types/sync.js"; import { toLowerCase } from "@/utils/lowercase.js"; -import { type Address, hexToNumber } from "viem"; +import { type Address, hexToBigInt, hexToNumber } from "viem"; const isValueMatched = ( - filterValue: T | T[] | null | undefined, + filterValue: T | T[] | Set | null | undefined, eventValue: T | undefined, ): boolean => { // match all @@ -27,6 +34,14 @@ const isValueMatched = ( return true; } + // set + if ( + filterValue instanceof Set && + filterValue.has(toLowerCase(eventValue) as T) + ) { + return true; + } + // single if (filterValue === toLowerCase(eventValue)) return true; @@ -60,70 +75,302 @@ export const isLogFilterMatched = ({ filter, block, log, + childAddresses, }: { filter: LogFilter; block: SyncBlock; log: SyncLog; + childAddresses?: Set
<Address> | Set<Address>
[]; }): boolean => { // Return `false` for out of range blocks if ( - hexToNumber(block.number) < filter.fromBlock || + hexToNumber(block.number) < (filter.fromBlock ?? 0) || hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; } - if (isValueMatched(filter.topics[0], log.topics[0]) === false) return false; - if (isValueMatched(filter.topics[1], log.topics[1]) === false) return false; - if (isValueMatched(filter.topics[2], log.topics[2]) === false) return false; - if (isValueMatched(filter.topics[3], log.topics[3]) === false) return false; + if (isValueMatched(filter.topic0, log.topics[0]) === false) return false; + if (isValueMatched(filter.topic1, log.topics[1]) === false) return false; + if (isValueMatched(filter.topic2, log.topics[2]) === false) return false; + if (isValueMatched(filter.topic3, log.topics[3]) === false) return false; + + if (isAddressFactory(filter.address)) { + if (Array.isArray(childAddresses)) { + if ( + childAddresses.every( + (address) => isValueMatched(address, log.address) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(childAddresses, log.address) === false) { + return false; + } + } + } else { + if (isValueMatched(filter.address, log.address) === false) { + return false; + } + } + + return true; +}; + +/** + * Returns `true` if `transaction` matches `filter` + */ +export const isTransactionFilterMatched = ({ + filter, + block, + transaction, + fromChildAddresses, + toChildAddresses, +}: { + filter: TransactionFilter; + block: Pick; + transaction: SyncTransaction; + fromChildAddresses?: Set
<Address> | Set<Address>[]; + toChildAddresses?: Set<Address> | Set<Address>
[]; +}): boolean => { + // Return `false` for out of range blocks if ( - isAddressFactory(filter.address) === false && - isValueMatched( - filter.address as Address | Address[] | undefined, - log.address, - ) === false + hexToNumber(block.number) < (filter.fromBlock ?? 0) || + hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; } + if (isAddressFactory(filter.fromAddress)) { + if (Array.isArray(fromChildAddresses)) { + if ( + fromChildAddresses.every( + (address) => isValueMatched(address, transaction.from) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(fromChildAddresses, transaction.from) === false) { + return false; + } + } + } else { + if ( + isValueMatched( + filter.fromAddress as Address | Address[] | undefined, + transaction.from, + ) === false + ) { + return false; + } + } + + if (isAddressFactory(filter.toAddress)) { + if (Array.isArray(toChildAddresses)) { + if ( + transaction.to !== null && + toChildAddresses.every( + (address) => isValueMatched(address, transaction.to!) === false, + ) + ) { + return false; + } + } else { + if ( + transaction.to !== null && + isValueMatched(toChildAddresses, transaction.to) === false + ) { + return false; + } + } + } else { + if ( + transaction.to !== null && + isValueMatched( + filter.toAddress as Address | Address[] | undefined, + transaction.to, + ) === false + ) { + return false; + } + } + + // NOTE: `filter.includeReverted` is intentionally ignored + return true; }; /** - * Returns `true` if `callTrace` matches `filter` + * Returns `true` if `trace` matches `filter` */ -export const isCallTraceFilterMatched = ({ +export const isTraceFilterMatched = ({ filter, block, - callTrace, + trace, + fromChildAddresses, + toChildAddresses, }: { - filter: CallTraceFilter; - block: SyncBlock; - callTrace: SyncCallTrace; + filter: TraceFilter; + block: Pick; + trace: Omit; + fromChildAddresses?: Set
<Address> | Set<Address>[]; + toChildAddresses?: Set<Address> | Set<Address>
[]; }): boolean => { // Return `false` for out of range blocks if ( - hexToNumber(block.number) < filter.fromBlock || + hexToNumber(block.number) < (filter.fromBlock ?? 0) || hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; } - if (isValueMatched(filter.fromAddress, callTrace.action.from) === false) { + if (isAddressFactory(filter.fromAddress)) { + if (Array.isArray(fromChildAddresses)) { + if ( + fromChildAddresses.every( + (address) => isValueMatched(address, trace.from) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(fromChildAddresses, trace.from) === false) { + return false; + } + } + } else { + if ( + isValueMatched( + filter.fromAddress as Address | Address[] | undefined, + trace.from, + ) === false + ) { + return false; + } + } + + if (isAddressFactory(filter.toAddress)) { + if (Array.isArray(toChildAddresses)) { + if ( + toChildAddresses.every( + (address) => isValueMatched(address, trace.to) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(toChildAddresses, trace.to) === false) { + return false; + } + } + } else { + if ( + isValueMatched( + filter.toAddress as Address | Address[] | undefined, + trace.to, + ) === false + ) { + return false; + } + } + + if (filter.callType !== trace.type) { + return false; + } + + if ( + isValueMatched(filter.functionSelector, trace.input.slice(0, 10)) === false + ) { return false; } + // NOTE: `filter.includeReverted` is intentionally ignored + + return true; +}; + +/** + * Returns `true` if `trace` matches `filter` + */ +export const isTransferFilterMatched = ({ + filter, + block, + trace, + fromChildAddresses, + toChildAddresses, +}: { + filter: TransferFilter; + block: Pick; + trace: Omit; + fromChildAddresses?: Set
<Address> | Set<Address>[]; + toChildAddresses?: Set<Address> | Set<Address>
[]; +}): boolean => { + // Return `false` for out of range blocks if ( - isAddressFactory(filter.toAddress) === false && - isValueMatched( - filter.toAddress as Address | Address[] | undefined, - callTrace.action.to, - ) === false + hexToNumber(block.number) < (filter.fromBlock ?? 0) || + hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; } + if (trace.value === undefined || hexToBigInt(trace.value) === 0n) { + return false; + } + + if (isAddressFactory(filter.fromAddress)) { + if (Array.isArray(fromChildAddresses)) { + if ( + fromChildAddresses.every( + (address) => isValueMatched(address, trace.from) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(fromChildAddresses, trace.from) === false) { + return false; + } + } + } else { + if ( + isValueMatched( + filter.fromAddress as Address | Address[] | undefined, + trace.from, + ) === false + ) { + return false; + } + } + + if (isAddressFactory(filter.toAddress)) { + if (Array.isArray(toChildAddresses)) { + if ( + toChildAddresses.every( + (address) => isValueMatched(address, trace.to) === false, + ) + ) { + return false; + } + } else { + if (isValueMatched(toChildAddresses, trace.to) === false) { + return false; + } + } + } else { + if ( + isValueMatched( + filter.toAddress as Address | Address[] | undefined, + trace.to, + ) === false + ) { + return false; + } + } + + // NOTE: `filter.includeReverted` is intentionally ignored + return true; }; @@ -139,7 +386,7 @@ export const isBlockFilterMatched = ({ }): boolean => { // Return `false` for out of range blocks if ( - hexToNumber(block.number) < filter.fromBlock || + hexToNumber(block.number) < (filter.fromBlock ?? 0) || hexToNumber(block.number) > (filter.toBlock ?? Number.POSITIVE_INFINITY) ) { return false; diff --git a/packages/core/src/sync-realtime/index.test.ts b/packages/core/src/sync-realtime/index.test.ts index 03cbc7227..5a4d10654 100644 --- a/packages/core/src/sync-realtime/index.test.ts +++ b/packages/core/src/sync-realtime/index.test.ts @@ -1,13 +1,38 @@ +import { ALICE, BOB } from "@/_test/constants.js"; +import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, setupCommon, setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { getRawRPCData, testClient } from "@/_test/utils.js"; -import type { SyncTrace } from "@/types/sync.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; +import { + createPair, + deployErc20, + deployFactory, + mintErc20, + swapPair, + transferErc20, + transferEth, +} from "@/_test/simulate.js"; +import { + getAccountsConfigAndIndexingFunctions, + getBlocksConfigAndIndexingFunctions, + getErc20ConfigAndIndexingFunctions, + getNetwork, + getPairWithFactoryConfigAndIndexingFunctions, + testClient, +} from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; +import type { LogFactory, LogFilter } from "@/sync/source.js"; +import { createRequestQueue } from "@/utils/requestQueue.js"; import { _eth_getBlockByNumber } from "@/utils/rpc.js"; +import { + encodeFunctionData, + encodeFunctionResult, + parseEther, + toHex, +} from "viem"; import { beforeEach, expect, test, vi } from "vitest"; import { type RealtimeSyncEvent, createRealtimeSync } from "./index.js"; @@ -15,41 +40,32 @@ beforeEach(setupCommon); beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); -// Helper function used to spoof "trace_filter" requests -// because they aren't supported by foundry. 
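The filter helpers above all funnel through the same value-matching primitive, now extended to accept a `Set`, plus an optional child-address check when a filter's address is a factory. A minimal self-contained sketch of those semantics (simplified types; it mirrors the diff for illustration and is not the exported @ponder/core API):

```ts
type Address = `0x${string}`;

const toLowerCase = (value: string) => value.toLowerCase();

// null/undefined matches everything, arrays and Sets match by membership,
// and single values match by case-insensitive equality.
const isValueMatched = <T extends string>(
  filterValue: T | T[] | Set<T> | null | undefined,
  eventValue: T | undefined,
): boolean => {
  if (filterValue === null || filterValue === undefined) return true;
  if (eventValue === undefined) return false;
  if (Array.isArray(filterValue) && filterValue.includes(toLowerCase(eventValue) as T)) {
    return true;
  }
  if (filterValue instanceof Set && filterValue.has(toLowerCase(eventValue) as T)) {
    return true;
  }
  return filterValue === toLowerCase(eventValue);
};

// Factory filters carry no static address list; the caller passes the known
// child addresses instead, either as one Set or as a [finalized, unfinalized] pair.
const isChildAddressMatched = (
  childAddresses: Set<Address> | Set<Address>[],
  eventAddress: Address,
): boolean =>
  Array.isArray(childAddresses)
    ? !childAddresses.every((set) => isValueMatched(set, eventAddress) === false)
    : isValueMatched(childAddresses, eventAddress);

// Example: a log emitted by a known child address matches the factory filter.
const child = "0x0000000000000000000000000000000000000001" as Address;
console.log(isChildAddressMatched([new Set([child]), new Set<Address>()], child)); // true
```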
-const getRequestQueue = async (requestQueue: RequestQueue) => { - const rpcData = await getRawRPCData(); - - return { - ...requestQueue, - request: (request: any) => { - if (request.method === "trace_block") { - const blockNumber = request.params[0]; - const traces: SyncTrace[] = - blockNumber === rpcData.block1.block.number - ? rpcData.block1.callTraces - : blockNumber === rpcData.block2.block.number - ? rpcData.block2.callTraces - : blockNumber === rpcData.block3.block.number - ? rpcData.block3.callTraces - : blockNumber === rpcData.block4.block.number - ? rpcData.block4.callTraces - : rpcData.block5.callTraces; - - return Promise.resolve(traces); - } else return requestQueue.request(request); - }, - } as RequestQueue; -}; - test("createRealtimeSyncService()", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: requestQueues[0], + network, + requestQueue, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -61,17 +77,37 @@ test("createRealtimeSyncService()", async (context) => { }); test("start() handles block", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { - blockNumber: 4, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, }); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + + await testClient.mine({ blocks: 1 }); + const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -91,17 +127,37 @@ test("start() handles block", async (context) => { }); test("start() no-op when receiving same block twice", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { - blockNumber: 4, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, }); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + + await testClient.mine({ blocks: 1 }); + const realtimeSync = createRealtimeSync({ 
common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -113,7 +169,7 @@ test("start() no-op when receiving same block twice", async (context) => { }); await queue.onIdle(); - await _eth_getBlockByNumber(requestQueues[0], { blockNumber: 5 }).then( + await _eth_getBlockByNumber(requestQueue, { blockNumber: 1 }).then( // @ts-ignore (block) => queue.add({ block }), ); @@ -128,17 +184,37 @@ test("start() no-op when receiving same block twice", async (context) => { }); test("start() gets missing block", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { blockNumber: 0, }); + await testClient.mine({ blocks: 2 }); + const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -146,15 +222,12 @@ test("start() gets missing block", async (context) => { const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new Map(), }); await queue.onIdle(); - expect(realtimeSync.unfinalizedBlocks).toHaveLength(5); + expect(realtimeSync.unfinalizedBlocks).toHaveLength(2); await realtimeSync.kill(); @@ -162,20 +235,38 @@ test("start() gets missing block", async (context) => { }); test("start() retries on error", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { blockNumber: 0, }); - const requestQueue = await getRequestQueue(requestQueues[0]); + await testClient.mine({ blocks: 1 }); const requestSpy = vi.spyOn(requestQueue, "request"); const realtimeSync = createRealtimeSync({ common, - network: networks[0], + network, requestQueue, sources, onEvent: vi.fn(), @@ -186,10 +277,7 @@ test("start() retries on error", async (context) => { const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new 
Map(), }); await queue.onIdle(); @@ -202,17 +290,37 @@ test("start() retries on error", async (context) => { }); test("kill()", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { - blockNumber: 3, + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, }); + await testClient.mine({ blocks: 2 }); + const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -220,10 +328,7 @@ test("kill()", async (context) => { await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new Map(), }); await realtimeSync.kill(); @@ -233,41 +338,312 @@ test("kill()", async (context) => { await cleanup(); }); -test("handleBlock() block event", async (context) => { - const { common, networks, requestQueues, sources } = context; +test("handleBlock() block event with log", async (context) => { + const { common } = context; const { cleanup } = await setupDatabaseServices(context); + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const data: Extract[] = []; const onEvent = vi.fn(async (_data) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + const realtimeSync = createRealtimeSync({ + common, + network, + requestQueue, + sources, + onEvent, + onFatalError: vi.fn(), + }); + + const queue = await realtimeSync.start({ + syncProgress: { finalized: finalizedBlock }, + initialChildAddresses: new Map(), + }); + await queue.onIdle(); + + expect(realtimeSync.unfinalizedBlocks).toHaveLength(1); + + expect(onEvent).toHaveBeenCalledTimes(1); + expect(onEvent).toHaveBeenCalledWith({ + type: "block", + filters: expect.any(Object), + block: expect.any(Object), + logs: expect.any(Object), + factoryLogs: expect.any(Object), + transactions: expect.any(Object), + traces: expect.any(Object), + transactionReceipts: expect.any(Object), + }); + + expect(data[0]?.block.number).toBe("0x2"); + expect(data[0]?.logs).toHaveLength(1); + expect(data[0]?.traces).toHaveLength(0); + expect(data[0]?.transactions).toHaveLength(1); + + await realtimeSync.kill(); + + await 
cleanup(); +}); + +test("handleBlock() block event with log factory", async (context) => { + const { common } = context; + const { cleanup } = await setupDatabaseServices(context); + + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployFactory({ sender: ALICE }); + const { result: pair } = await createPair({ + factory: address, + sender: ALICE, + }); + await swapPair({ + pair, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const filter = sources[0]!.filter as LogFilter; + + const data: Extract[] = []; + + const onEvent = vi.fn(async (_data) => { + data.push(_data); + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + const realtimeSync = createRealtimeSync({ + common, + network, + requestQueue, + sources, + onEvent, + onFatalError: vi.fn(), + }); + + const queue = await realtimeSync.start({ + syncProgress: { finalized: finalizedBlock }, + initialChildAddresses: new Map([[filter.address, new Set()]]), + }); + await queue.onIdle(); + + expect(realtimeSync.unfinalizedBlocks).toHaveLength(2); + + expect(onEvent).toHaveBeenCalledTimes(2); + expect(onEvent).toHaveBeenCalledWith({ + type: "block", + filters: expect.any(Object), + block: expect.any(Object), + logs: expect.any(Object), + factoryLogs: expect.any(Object), + transactions: expect.any(Object), + traces: expect.any(Object), + transactionReceipts: expect.any(Object), + }); + + expect(data[0]?.block.number).toBe("0x2"); + expect(data[1]?.block.number).toBe("0x3"); + + expect(data[0]?.logs).toHaveLength(0); + expect(data[1]?.logs).toHaveLength(1); + + expect(data[0]?.factoryLogs).toHaveLength(1); + expect(data[1]?.factoryLogs).toHaveLength(0); + + expect(data[0]?.traces).toHaveLength(0); + expect(data[1]?.traces).toHaveLength(0); + + expect(data[0]?.transactions).toHaveLength(0); + expect(data[1]?.transactions).toHaveLength(1); + + await realtimeSync.kill(); + + await cleanup(); +}); + +test("handleBlock() block event with block", async (context) => { + const { common } = context; + const { cleanup } = await setupDatabaseServices(context); + + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const data: Extract[] = []; + + const onEvent = vi.fn(async (_data) => { + data.push(_data); + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent, onFatalError: vi.fn(), }); + await testClient.mine({ blocks: 1 }); + + const queue = await realtimeSync.start({ + syncProgress: { finalized: finalizedBlock }, + initialChildAddresses: new Map(), + }); + await queue.onIdle(); + + expect(realtimeSync.unfinalizedBlocks).toHaveLength(1); + 
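Each of these handleBlock() tests follows the same pattern: collect the `type: "block"` payloads emitted through `onEvent`, then assert on the logs, traces, transactions, and receipts attached to each block. A compact sketch of that pattern with the full type annotation on the collected array (assuming `RealtimeSyncEvent` is the union exported from ./index.js, as in the test imports above):

```ts
import type { RealtimeSyncEvent } from "./index.js";

// Keep only the "block" events; each carries the matched filters plus the
// per-block logs, factoryLogs, transactions, traces, and transactionReceipts.
const data: Extract<RealtimeSyncEvent, { type: "block" }>[] = [];

const onEvent = async (event: RealtimeSyncEvent) => {
  if (event.type === "block") data.push(event);
};

// After `queue.onIdle()`, assertions read straight off the collected payloads,
// e.g. data[0]?.block.number, data[0]?.logs.length, data[0]?.traces.length.
```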
+ expect(onEvent).toHaveBeenCalledTimes(1); + expect(onEvent).toHaveBeenCalledWith({ + type: "block", + filters: expect.any(Object), + block: expect.any(Object), + logs: expect.any(Object), + factoryLogs: expect.any(Object), + transactions: expect.any(Object), + traces: expect.any(Object), + transactionReceipts: expect.any(Object), + }); + + expect(data[0]?.block.number).toBe("0x1"); + expect(data[0]?.logs).toHaveLength(0); + expect(data[0]?.traces).toHaveLength(0); + expect(data[0]?.transactions).toHaveLength(0); + + await realtimeSync.kill(); + + await cleanup(); +}); + +test("handleBlock() block event with transaction", async (context) => { + const { common } = context; + const { cleanup } = await setupDatabaseServices(context); + + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const data: Extract[] = []; + + const onEvent = vi.fn(async (_data) => { + data.push(_data); + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + + const realtimeSync = createRealtimeSync({ + common, + network, + requestQueue, + sources: sources.filter(({ filter }) => filter.type === "transaction"), + onEvent, + onFatalError: vi.fn(), + }); + const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new Map(), }); await queue.onIdle(); - expect(realtimeSync.unfinalizedBlocks).toHaveLength(5); + expect(realtimeSync.unfinalizedBlocks).toHaveLength(1); - expect(onEvent).toHaveBeenCalledTimes(5); + expect(onEvent).toHaveBeenCalledTimes(1); expect(onEvent).toHaveBeenCalledWith({ type: "block", filters: expect.any(Object), @@ -275,33 +651,278 @@ test("handleBlock() block event", async (context) => { logs: expect.any(Object), factoryLogs: expect.any(Object), transactions: expect.any(Object), - callTraces: expect.any(Object), + traces: expect.any(Object), transactionReceipts: expect.any(Object), }); expect(data[0]?.block.number).toBe("0x1"); - expect(data[1]?.block.number).toBe("0x2"); - expect(data[2]?.block.number).toBe("0x3"); - expect(data[3]?.block.number).toBe("0x4"); - expect(data[4]?.block.number).toBe("0x5"); + expect(data[0]?.logs).toHaveLength(0); + expect(data[0]?.traces).toHaveLength(0); + expect(data[0]?.transactions).toHaveLength(1); + expect(data[0]?.transactionReceipts).toHaveLength(1); + + await realtimeSync.kill(); + + await cleanup(); +}); +test("handleBlock() block event with transfer", async (context) => { + const { common } = context; + const { cleanup } = await setupDatabaseServices(context); + + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + 
rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const request = async (request: any) => { + if (request.method === "debug_traceBlockByHash") { + return Promise.resolve([ + { + txHash: hash, + result: { + type: "CALL", + from: ALICE, + to: BOB, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + output: "0x0", + value: toHex(parseEther("1")), + }, + }, + ]); + } + + return requestQueue.request(request); + }; + + const data: Extract[] = []; + + const onEvent = vi.fn(async (_data) => { + data.push(_data); + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + + const realtimeSync = createRealtimeSync({ + common, + network, + requestQueue: { + ...requestQueue, + // @ts-ignore + request, + }, + sources, + onEvent, + onFatalError: vi.fn(), + }); + + const queue = await realtimeSync.start({ + syncProgress: { finalized: finalizedBlock }, + initialChildAddresses: new Map(), + }); + await queue.onIdle(); + + expect(realtimeSync.unfinalizedBlocks).toHaveLength(1); + + expect(onEvent).toHaveBeenCalledTimes(1); + expect(onEvent).toHaveBeenCalledWith({ + type: "block", + filters: expect.any(Object), + block: expect.any(Object), + logs: expect.any(Object), + factoryLogs: expect.any(Object), + transactions: expect.any(Object), + traces: expect.any(Object), + transactionReceipts: expect.any(Object), + }); + + expect(data[0]?.block.number).toBe("0x1"); expect(data[0]?.logs).toHaveLength(0); - expect(data[1]?.logs).toHaveLength(2); - expect(data[2]?.logs).toHaveLength(0); - expect(data[3]?.logs).toHaveLength(1); - expect(data[4]?.logs).toHaveLength(0); + expect(data[0]?.traces).toHaveLength(1); + expect(data[0]?.transactions).toHaveLength(1); + expect(data[0]?.transactionReceipts).toHaveLength(1); - expect(data[0]?.callTraces).toHaveLength(0); - expect(data[1]?.callTraces).toHaveLength(0); - expect(data[2]?.callTraces).toHaveLength(1); - expect(data[3]?.callTraces).toHaveLength(1); - expect(data[4]?.callTraces).toHaveLength(0); + await realtimeSync.kill(); - expect(data[0]?.transactions).toHaveLength(0); - expect(data[1]?.transactions).toHaveLength(2); - expect(data[2]?.transactions).toHaveLength(1); - expect(data[3]?.transactions).toHaveLength(1); - expect(data[4]?.transactions).toHaveLength(0); + await cleanup(); +}); + +test("handleBlock() block event with trace", async (context) => { + const { common } = context; + const { cleanup } = await setupDatabaseServices(context); + + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + await transferErc20({ + erc20: address, + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const block2 = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + + const block3 = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const request = async (request: any) => { + if (request.method === "debug_traceBlockByHash") { + if (request.params[0] === block2.hash) { + return Promise.resolve([ + { + txHash: 
block2.transactions[0]!.hash, + result: { + type: "CREATE", + from: ALICE, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + value: "0x0", + }, + }, + ]); + } + + if (request.params[0] === block3.hash) { + return Promise.resolve([ + { + txHash: block3.transactions[0]!.hash, + result: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + }, + }, + ]); + } + + return Promise.resolve([]); + } + + return requestQueue.request(request); + }; + + const data: Extract[] = []; + + const onEvent = vi.fn(async (_data) => { + data.push(_data); + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + const realtimeSync = createRealtimeSync({ + common, + network, + requestQueue: { + ...requestQueue, + // @ts-ignore + request, + }, + sources, + onEvent, + onFatalError: vi.fn(), + }); + + const queue = await realtimeSync.start({ + syncProgress: { finalized: finalizedBlock }, + initialChildAddresses: new Map(), + }); + await queue.onIdle(); + + expect(realtimeSync.unfinalizedBlocks).toHaveLength(2); + + expect(onEvent).toHaveBeenCalledTimes(2); + expect(onEvent).toHaveBeenCalledWith({ + type: "block", + filters: expect.any(Object), + block: expect.any(Object), + logs: expect.any(Object), + factoryLogs: expect.any(Object), + transactions: expect.any(Object), + traces: expect.any(Object), + transactionReceipts: expect.any(Object), + }); + + expect(data[0]?.block.number).toBe("0x2"); + expect(data[1]?.block.number).toBe("0x3"); + + expect(data[0]?.logs).toHaveLength(1); + expect(data[1]?.logs).toHaveLength(1); + + expect(data[0]?.traces).toHaveLength(0); + expect(data[1]?.traces).toHaveLength(1); + + expect(data[0]?.transactions).toHaveLength(1); + expect(data[1]?.transactions).toHaveLength(1); + + expect(data[0]?.transactionReceipts).toHaveLength(0); + expect(data[1]?.transactionReceipts).toHaveLength(0); await realtimeSync.kill(); @@ -309,10 +930,28 @@ test("handleBlock() block event", async (context) => { }); test("handleBlock() finalize event", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { blockNumber: 0, }); @@ -324,8 +963,8 @@ test("handleBlock() finalize event", async (context) => { const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent, onFatalError: vi.fn(), @@ -335,10 +974,7 @@ test("handleBlock() finalize event", async (context) => { const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, 
new Set()], - ]), + initialChildAddresses: new Map(), }); await queue.onIdle(); @@ -347,9 +983,9 @@ test("handleBlock() finalize event", async (context) => { block: expect.any(Object), }); - expect(realtimeSync.unfinalizedBlocks).toHaveLength(5); + expect(realtimeSync.unfinalizedBlocks).toHaveLength(2); - expect(data[0]?.block.number).toBe("0x4"); + expect(data[0]?.block.number).toBe("0x2"); await realtimeSync.kill(); @@ -357,33 +993,50 @@ test("handleBlock() finalize event", async (context) => { }); test("handleReorg() finds common ancestor", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { - blockNumber: 0, + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, }); const onEvent = vi.fn(); + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent, onFatalError: vi.fn(), }); + await testClient.mine({ blocks: 3 }); + const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new Map(), }); - await _eth_getBlockByNumber(requestQueues[0], { blockNumber: 3 }).then( + await _eth_getBlockByNumber(requestQueue, { blockNumber: 2 }).then( // @ts-ignore (block) => queue.add({ block }), ); @@ -392,10 +1045,10 @@ test("handleReorg() finds common ancestor", async (context) => { expect(onEvent).toHaveBeenCalledWith({ type: "reorg", block: expect.any(Object), - reorgedBlocks: [expect.any(Object), expect.any(Object), expect.any(Object)], + reorgedBlocks: [expect.any(Object), expect.any(Object)], }); - expect(realtimeSync.unfinalizedBlocks).toHaveLength(2); + expect(realtimeSync.unfinalizedBlocks).toHaveLength(1); await realtimeSync.kill(); @@ -403,42 +1056,59 @@ test("handleReorg() finds common ancestor", async (context) => { }); test("handleReorg() throws error for deep reorg", async (context) => { - const { common, networks, requestQueues, sources } = context; + const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const finalizedBlock = await _eth_getBlockByNumber(requestQueues[0], { + const network = getNetwork({ finalityBlockCount: 2 }); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network: networks[0], - requestQueue: await getRequestQueue(requestQueues[0]), + network, + requestQueue, sources, onEvent: vi.fn(), 
onFatalError: vi.fn(), }); + await testClient.mine({ blocks: 3 }); + const queue = await realtimeSync.start({ syncProgress: { finalized: finalizedBlock }, - initialChildAddresses: new Map([ - [sources[1].filter.address, new Set()], - [sources[2].filter.toAddress, new Set()], - ]), + initialChildAddresses: new Map(), }); await queue.onIdle(); - const block = await _eth_getBlockByNumber(requestQueues[0], { - blockNumber: 5, + const block = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, }); // @ts-ignore await queue.add({ block: { ...block, - number: "0x6", + number: "0x4", hash: "0x0000000000000000000000000000000000000000000000000000000000000000", - parentHash: realtimeSync.unfinalizedBlocks[3]!.hash, + parentHash: realtimeSync.unfinalizedBlocks[1]!.hash, }, }); diff --git a/packages/core/src/sync-realtime/index.ts b/packages/core/src/sync-realtime/index.ts index 5ffbe1b4b..2c274cac6 100644 --- a/packages/core/src/sync-realtime/index.ts +++ b/packages/core/src/sync-realtime/index.ts @@ -3,30 +3,32 @@ import type { Network } from "@/config/networks.js"; import { type SyncProgress, syncBlockToLightBlock } from "@/sync/index.js"; import { type BlockFilter, - type CallTraceFilter, type Factory, type Filter, type LogFilter, type Source, + type TraceFilter, + type TransactionFilter, + type TransferFilter, getChildAddress, isAddressFactory, } from "@/sync/source.js"; import type { LightBlock, SyncBlock, - SyncCallTrace, SyncLog, + SyncTrace, SyncTransaction, SyncTransactionReceipt, } from "@/types/sync.js"; import { range } from "@/utils/range.js"; import type { RequestQueue } from "@/utils/requestQueue.js"; import { + _debug_traceBlockByHash, _eth_getBlockByHash, _eth_getBlockByNumber, _eth_getLogs, _eth_getTransactionReceipt, - _trace_block, } from "@/utils/rpc.js"; import { wait } from "@/utils/wait.js"; import { type Queue, createQueue } from "@ponder/common"; @@ -34,9 +36,11 @@ import { type Address, type Hash, hexToNumber } from "viem"; import { isFilterInBloom, zeroLogsBloom } from "./bloom.js"; import { isBlockFilterMatched, - isCallTraceFilterMatched, isLogFactoryMatched, isLogFilterMatched, + isTraceFilterMatched, + isTransactionFilterMatched, + isTransferFilterMatched, } from "./filter.js"; export type RealtimeSync = { @@ -64,7 +68,7 @@ export type BlockWithEventData = { filters: Set; logs: SyncLog[]; factoryLogs: SyncLog[]; - callTraces: SyncCallTrace[]; + traces: SyncTrace[]; transactions: SyncTransaction[]; transactionReceipts: SyncTransactionReceipt[]; }; @@ -112,27 +116,52 @@ export const createRealtimeSync = ( const factories: Factory[] = []; const logFilters: LogFilter[] = []; - const callTraceFilters: CallTraceFilter[] = []; + const traceFilters: TraceFilter[] = []; + const transactionFilters: TransactionFilter[] = []; + const transferFilters: TransferFilter[] = []; const blockFilters: BlockFilter[] = []; for (const source of args.sources) { + // Collect filters from sources if (source.type === "contract") { if (source.filter.type === "log") { logFilters.push(source.filter); - } else if (source.filter.type === "callTrace") { - callTraceFilters.push(source.filter); + } else if (source.filter.type === "trace") { + traceFilters.push(source.filter); } - - const _address = - source.filter.type === "log" - ? 
source.filter.address - : source.filter.toAddress; - if (isAddressFactory(_address)) { - factories.push(_address); + } else if (source.type === "account") { + if (source.filter.type === "transaction") { + transactionFilters.push(source.filter); + } else if (source.filter.type === "transfer") { + transferFilters.push(source.filter); } } else if (source.type === "block") { blockFilters.push(source.filter); } + + // Collect factories from sources + switch (source.filter.type) { + case "trace": + case "transaction": + case "transfer": { + const { fromAddress, toAddress } = source.filter; + + if (isAddressFactory(fromAddress)) { + factories.push(fromAddress); + } + if (isAddressFactory(toAddress)) { + factories.push(toAddress); + } + break; + } + case "log": { + const { address } = source.filter; + if (isAddressFactory(address)) { + factories.push(address); + } + break; + } + } } for (const factory of factories) { @@ -153,7 +182,7 @@ export const createRealtimeSync = ( block, logs, factoryLogs, - callTraces, + traces, transactions, transactionReceipts, }: Omit) => { @@ -185,16 +214,20 @@ export const createRealtimeSync = ( let isMatched = false; for (const filter of logFilters) { + const childAddresses = isAddressFactory(filter.address) + ? [ + finalizedChildAddresses.get(filter.address)!, + unfinalizedChildAddresses.get(filter.address)!, + ] + : undefined; + if ( - isLogFilterMatched({ filter, block, log }) && - (isAddressFactory(filter.address) - ? finalizedChildAddresses - .get(filter.address)! - .has(log.address.toLowerCase() as Address) || - unfinalizedChildAddresses - .get(filter.address)! - .has(log.address.toLowerCase() as Address) - : true) + isLogFilterMatched({ + filter, + block, + log, + childAddresses, + }) ) { matchedFilters.add(filter); isMatched = true; @@ -204,21 +237,60 @@ export const createRealtimeSync = ( return isMatched; }); - // Remove call traces that don't match a filter, accounting for factory addresses - callTraces = callTraces.filter((callTrace) => { + traces = traces.filter((trace) => { let isMatched = false; + for (const filter of transferFilters) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? [ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; - for (const filter of callTraceFilters) { if ( - isCallTraceFilterMatched({ filter, block, callTrace }) && - (isAddressFactory(filter.toAddress) - ? finalizedChildAddresses - .get(filter.toAddress)! - .has(callTrace.action.to.toLowerCase() as Address) || - unfinalizedChildAddresses - .get(filter.toAddress)! - .has(callTrace.action.to.toLowerCase() as Address) - : true) + isTransferFilterMatched({ + filter, + block: { number: block.number }, + trace: trace.trace, + fromChildAddresses, + toChildAddresses, + }) + ) { + matchedFilters.add(filter); + isMatched = true; + } + } + + for (const filter of traceFilters) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? 
[ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; + + if ( + isTraceFilterMatched({ + filter, + block: { number: block.number }, + trace: trace.trace, + fromChildAddresses, + toChildAddresses, + }) ) { matchedFilters.add(filter); isMatched = true; @@ -229,15 +301,52 @@ export const createRealtimeSync = ( }); // Remove transactions and transaction receipts that may have been filtered out + const transactionHashes = new Set(); for (const log of logs) { transactionHashes.add(log.transactionHash); } - for (const trace of callTraces) { + for (const trace of traces) { transactionHashes.add(trace.transactionHash); } - transactions = transactions.filter((t) => transactionHashes.has(t.hash)); + transactions = transactions.filter((transaction) => { + let isMatched = transactionHashes.has(transaction.hash); + for (const filter of transactionFilters) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? [ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; + + if ( + isTransactionFilterMatched({ + filter, + block, + transaction, + fromChildAddresses, + toChildAddresses, + }) + ) { + matchedFilters.add(filter); + isMatched = true; + } + } + return isMatched; + }); + + for (const transaction of transactions) { + transactionHashes.add(transaction.hash); + } + transactionReceipts = transactionReceipts.filter((t) => transactionHashes.has(t.transactionHash), ); @@ -249,7 +358,7 @@ export const createRealtimeSync = ( } } - if (logs.length > 0 || callTraces.length > 0) { + if (logs.length > 0 || traces.length > 0 || transactions.length > 0) { const _text: string[] = []; if (logs.length === 1) { @@ -258,10 +367,16 @@ export const createRealtimeSync = ( _text.push(`${logs.length} logs`); } - if (callTraces.length === 1) { - _text.push("1 call trace"); - } else if (callTraces.length > 1) { - _text.push(`${callTraces.length} call traces`); + if (traces.length === 1) { + _text.push("1 trace"); + } else if (traces.length > 1) { + _text.push(`${traces.length} traces`); + } + + if (transactions.length === 1) { + _text.push("1 transaction"); + } else if (transactions.length > 1) { + _text.push(`${transactions.length} transactions`); } const text = _text.filter((t) => t !== undefined).join(" and "); @@ -288,7 +403,7 @@ export const createRealtimeSync = ( block, factoryLogs, logs, - callTraces, + traces, transactions, transactionReceipts, }); @@ -513,32 +628,32 @@ export const createRealtimeSync = ( // Traces //////// - const shouldRequestTraces = callTraceFilters.length > 0; + const shouldRequestTraces = + traceFilters.length > 0 || transferFilters.length > 0; - let callTraces: SyncCallTrace[] = []; + let traces: SyncTrace[] = []; if (shouldRequestTraces) { - const traces = await _trace_block(args.requestQueue, { - blockNumber: hexToNumber(block.number), + traces = await _debug_traceBlockByHash(args.requestQueue, { + hash: block.hash, }); // Protect against RPCs returning empty traces. Known to happen near chain tip. // Use the fact that any transaction produces a trace. if (block.transactions.length !== 0 && traces.length === 0) { throw new Error( - "Detected invalid trace_block response. 
`block.transactions` is not empty but zero traces were returned.", + "Detected invalid debug_traceBlock response. `block.transactions` is not empty but zero traces were returned.", ); } - - callTraces = traces.filter( - (trace) => trace.type === "call", - ) as SyncCallTrace[]; } - // Check that traces refer to the correct block - for (const trace of callTraces) { - if (trace.blockHash !== block.hash) { + // Validate that each trace point to valid transaction in the block + for (const trace of traces) { + if ( + block.transactions.find((t) => t.hash === trace.transactionHash) === + undefined + ) { throw new Error( - `Detected inconsistent RPC responses. 'trace.blockHash' ${trace.blockHash} does not match 'block.hash' ${block.hash}`, + `Detected inconsistent RPC responses. 'trace.txHash' ${trace.transactionHash} not found in 'block' ${block.hash}`, ); } } @@ -571,44 +686,71 @@ export const createRealtimeSync = ( // Remove logs that don't match a filter, recording required transactions logs = logs.filter((log) => { - let isLogMatched = false; + let isMatched = false; for (const filter of logFilters) { if (isLogFilterMatched({ filter, block, log })) { - isLogMatched = true; + // TODO: includeTransactionReceipt + // return early if includeTransactionReceipt, otherwise continue checking filters requiredTransactions.add(log.transactionHash); - if (filter.includeTransactionReceipts) { - requiredTransactionReceipts.add(log.transactionHash); - } + isMatched = true; } } - return isLogMatched; + return isMatched; }); - // Remove call traces that don't match a filter, recording required transactions - callTraces = callTraces.filter((callTrace) => { - let isCallTraceMatched = false; - for (const filter of callTraceFilters) { - if (isCallTraceFilterMatched({ filter, block, callTrace })) { - isCallTraceMatched = true; - requiredTransactions.add(callTrace.transactionHash); - if (filter.includeTransactionReceipts) { - requiredTransactionReceipts.add(callTrace.transactionHash); - } + // Initial weak trace filtering before full filtering with factory addresses in handleBlock + traces = traces.filter((trace) => { + let isMatched = false; + for (const filter of transferFilters) { + if ( + isTransferFilterMatched({ + filter, + block: { number: block.number }, + trace: trace.trace, + }) + ) { + // TODO: includeTransactionReceipt + // return early if includeTransactionReceipt, otherwise continue checking filters + requiredTransactions.add(trace.transactionHash); + isMatched = true; + } + } + + for (const filter of traceFilters) { + if ( + isTraceFilterMatched({ + filter, + block: { number: block.number }, + trace: trace.trace, + }) + ) { + // TODO: includeTransactionReceipt + // return early if includeTransactionReceipt, otherwise continue checking filters + requiredTransactions.add(trace.transactionHash); + isMatched = true; } } - return isCallTraceMatched; + return isMatched; }); //////// // Transactions //////// - const transactions = block.transactions.filter(({ hash }) => - requiredTransactions.has(hash), - ); + const transactions = block.transactions.filter((transaction) => { + let isMatched = requiredTransactions.has(transaction.hash); + for (const filter of transactionFilters) { + if (isTransactionFilterMatched({ filter, block, transaction })) { + requiredTransactions.add(transaction.hash); + requiredTransactionReceipts.add(transaction.hash); + isMatched = true; + } + } + return isMatched; + }); // Validate that filtered logs/callTraces point to valid transaction in the block const blockTransactionsHashes 
= new Set( @@ -634,24 +776,11 @@ export const createRealtimeSync = ( ), ); - // Filter out call traces from reverted transactions - - const revertedTransactions = new Set(); - for (const receipt of transactionReceipts) { - if (receipt.status === "0x0") { - revertedTransactions.add(receipt.transactionHash); - } - } - - callTraces = callTraces.filter( - (trace) => revertedTransactions.has(trace.transactionHash) === false, - ); - return { block, logs, factoryLogs, - callTraces, + traces, transactions, transactionReceipts, }; @@ -717,6 +846,7 @@ export const createRealtimeSync = ( hexToNumber(latestBlock.number) + MAX_QUEUED_BLOCKS, ), ); + const pendingBlocks = await Promise.all( missingBlockRange.map((blockNumber) => _eth_getBlockByNumber(args.requestQueue, { diff --git a/packages/core/src/sync-store/encoding.ts b/packages/core/src/sync-store/encoding.ts index e153ac65b..9403827af 100644 --- a/packages/core/src/sync-store/encoding.ts +++ b/packages/core/src/sync-store/encoding.ts @@ -1,8 +1,8 @@ import type { FragmentId } from "@/sync/fragments.js"; import type { SyncBlock, - SyncCallTrace, SyncLog, + SyncTrace, SyncTransaction, SyncTransactionReceipt, } from "@/types/sync.js"; @@ -140,6 +140,8 @@ export const encodeLog = ({ type TransactionsTable = { hash: Hash; + chainId: number; + checkpoint: string; blockHash: Hash; blockNumber: ColumnType; from: Address; @@ -161,19 +163,27 @@ type TransactionsTable = { string | bigint > | null; accessList: string | null; - - chainId: number; }; export const encodeTransaction = ({ transaction, + block, chainId, }: { transaction: SyncTransaction; + block: Pick; chainId: number; }): Insertable => { return { hash: transaction.hash, + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(chainId), + blockNumber: hexToBigInt(transaction.blockNumber), + transactionIndex: hexToBigInt(transaction.transactionIndex), + eventType: EVENT_TYPES.transactions, + eventIndex: zeroCheckpoint.eventIndex, + }), chainId, blockHash: transaction.blockHash, blockNumber: hexToBigInt(transaction.blockNumber), @@ -247,54 +257,72 @@ export const encodeTransactionReceipt = ({ }; }; -type CallTracesTable = { +type TracesTable = { id: string; chainId: number; checkpoint: string; - callType: string; + type: string; + transactionHash: Hex; + blockHash: Hex; + blockNumber: ColumnType; from: Address; + to: Address | null; gas: ColumnType; + gasUsed: ColumnType; input: Hex; - to: Address; - value: ColumnType; - blockHash: Hex; - blockNumber: ColumnType; - error: string | null; - gasUsed: ColumnType | null; - output: Hex | null; - subtraces: number; - traceAddress: string; - transactionHash: Hex; - transactionPosition: number; functionSelector: Hex; + output: Hex | null; + error: string | null; + revertReason: string | null; + value: ColumnType< + string | null, + string | bigint | null, + string | bigint | null + >; + index: number; + subcalls: number; + isReverted: number; }; -export function encodeCallTrace({ +export function encodeTrace({ trace, + block, + transaction, chainId, }: { - trace: SyncCallTrace; + trace: Omit; + block: Pick; + transaction: Pick; chainId: number; -}): Insertable> { +}): Insertable { return { - id: `${trace.transactionHash}-${JSON.stringify(trace.traceAddress)}`, + id: `${transaction.hash}-${trace.index}`, chainId, - callType: trace.action.callType, - from: toLowerCase(trace.action.from), - gas: hexToBigInt(trace.action.gas), - input: trace.action.input, - to: toLowerCase(trace.action.to), - value: 
hexToBigInt(trace.action.value), - blockHash: trace.blockHash, - blockNumber: hexToBigInt(trace.blockNumber), + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(chainId), + blockNumber: hexToBigInt(block.number), + transactionIndex: hexToBigInt(transaction.transactionIndex), + eventType: EVENT_TYPES.traces, + eventIndex: BigInt(trace.index), + }), + type: trace.type, + transactionHash: transaction.hash, + blockHash: block.hash, + blockNumber: hexToBigInt(block.number), + from: toLowerCase(trace.from), + to: trace.to ? toLowerCase(trace.to) : null, + gas: hexToBigInt(trace.gas), + gasUsed: hexToBigInt(trace.gasUsed), + input: trace.input, + functionSelector: trace.input.slice(0, 10) as Hex, + output: trace.output ?? null, + revertReason: trace.revertReason ?? null, error: trace.error ?? null, - gasUsed: trace.result ? hexToBigInt(trace.result.gasUsed) : null, - output: trace.result ? trace.result.output : null, - subtraces: trace.subtraces, - traceAddress: JSON.stringify(trace.traceAddress), - transactionHash: trace.transactionHash, - transactionPosition: trace.transactionPosition, - functionSelector: trace.action.input.slice(0, 10).toLowerCase() as Hex, + value: trace.value ? hexToBigInt(trace.value) : null, + index: trace.index, + subcalls: trace.subcalls, + isReverted: trace.error === undefined ? 0 : 1, }; } @@ -320,7 +348,7 @@ export type PonderSyncSchema = { logs: LogsTable; transactions: TransactionsTable; transactionReceipts: TransactionReceiptsTable; - callTraces: CallTracesTable; + traces: TracesTable; rpc_request_results: RpcRequestResultsTable; diff --git a/packages/core/src/sync-store/index.test.ts b/packages/core/src/sync-store/index.test.ts index 0c87e00f0..1d7e6db3c 100644 --- a/packages/core/src/sync-store/index.test.ts +++ b/packages/core/src/sync-store/index.test.ts @@ -1,19 +1,56 @@ +import { ALICE, BOB } from "@/_test/constants.js"; +import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, setupCommon, setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { getRawRPCData } from "@/_test/utils.js"; -import type { Factory, LogFactory, LogFilter } from "@/sync/source.js"; +import { + createPair, + deployErc20, + deployFactory, + mintErc20, + swapPair, + transferEth, +} from "@/_test/simulate.js"; +import { + getAccountsConfigAndIndexingFunctions, + getBlocksConfigAndIndexingFunctions, + getErc20ConfigAndIndexingFunctions, + getNetwork, + getPairWithFactoryConfigAndIndexingFunctions, + testClient, +} from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; +import type { + BlockFilter, + Factory, + LogFactory, + LogFilter, +} from "@/sync/source.js"; +import type { SyncTrace, SyncTransaction } from "@/types/sync.js"; import { decodeCheckpoint, encodeCheckpoint, maxCheckpoint, zeroCheckpoint, } from "@/utils/checkpoint.js"; -import { _eth_getLogs } from "@/utils/rpc.js"; -import { type Address, hexToNumber } from "viem"; +import { createRequestQueue } from "@/utils/requestQueue.js"; +import { + _eth_getBlockByNumber, + _eth_getLogs, + _eth_getTransactionReceipt, +} from "@/utils/rpc.js"; +import { + type Address, + encodeFunctionData, + encodeFunctionResult, + hexToNumber, + parseEther, + zeroAddress, + zeroHash, +} from "viem"; import { beforeEach, expect, test } from "vitest"; beforeEach(setupCommon); @@ -28,7 +65,7 @@ test("setup creates tables", async (context) => { expect(tableNames).toContain("blocks"); 
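For reference, several columns of the new `traces` table are derived by `encodeTrace` above rather than stored verbatim: the primary key is `` `${transaction.hash}-${trace.index}` ``, `functionSelector` is the first four bytes of `input`, and `isReverted` collapses the optional `error` string into a 0/1 flag. A minimal, dependency-free sketch of those derivations — `TraceLike` is an illustrative stand-in, not the real `SyncTrace` shape:

```ts
// Derivations used by encodeTrace, shown in isolation. `TraceLike` is an
// illustrative stand-in for the relevant fields of SyncTrace["trace"].
type Hex = `0x${string}`;

interface TraceLike {
  index: number;
  input: Hex;
  error?: string;
}

function deriveTraceColumns(transactionHash: Hex, trace: TraceLike) {
  return {
    // One row per trace, keyed by parent transaction hash + trace position.
    id: `${transactionHash}-${trace.index}`,
    // "0x" plus the 4-byte selector is the first 10 characters of calldata.
    functionSelector: trace.input.slice(0, 10) as Hex,
    // Any error string marks the trace (and its row) as reverted.
    isReverted: trace.error === undefined ? 0 : 1,
  };
}

// ERC-20 transfer(address,uint256) calldata starts with selector 0xa9059cbb.
const row = deriveTraceColumns(
  "0x1111111111111111111111111111111111111111111111111111111111111111",
  {
    index: 0,
    input:
      "0xa9059cbb0000000000000000000000000000000000000000000000000000000000000001",
  },
);
// row.functionSelector === "0xa9059cbb", row.isReverted === 0
```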
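Both `encodeTransaction` and `encodeTrace` above now take the parent block so they can persist a `checkpoint` column via `encodeCheckpoint`. The sketch below is a hypothetical, simplified encoding — the field widths and the real `encodeCheckpoint` layout in `@/utils/checkpoint.js` are assumptions, not taken from this diff — meant only to illustrate why a zero-padded composite key lets plain string comparison reproduce chronological event order:

```ts
// Hypothetical, simplified stand-in for encodeCheckpoint. The real field
// widths and layout are not reproduced here; the point is only that
// left-padded fields make string order match event order.
interface CheckpointFields {
  blockTimestamp: number;
  chainId: bigint;
  blockNumber: bigint;
  transactionIndex: bigint;
  eventType: number;
  eventIndex: bigint;
}

const pad = (value: number | bigint, width: number) =>
  value.toString().padStart(width, "0");

function encodeCheckpointSketch(c: CheckpointFields): string {
  return (
    pad(c.blockTimestamp, 10) +
    pad(c.chainId, 16) +
    pad(c.blockNumber, 16) +
    pad(c.transactionIndex, 16) +
    pad(c.eventType, 1) +
    pad(c.eventIndex, 16)
  );
}

// Two traces in the same transaction sort by eventIndex, because every
// earlier field is an identical prefix. (eventType 7 matches the value the
// trace checkpoint tests below expect.)
const base = {
  blockTimestamp: 1_700_000_000,
  chainId: 1n,
  blockNumber: 100n,
  transactionIndex: 0n,
  eventType: 7,
};
const first = encodeCheckpointSketch({ ...base, eventIndex: 0n });
const second = encodeCheckpointSketch({ ...base, eventIndex: 1n });
console.log(first < second); // true
```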
expect(tableNames).toContain("logs"); expect(tableNames).toContain("transactions"); - expect(tableNames).toContain("callTraces"); + expect(tableNames).toContain("traces"); expect(tableNames).toContain("transactionReceipts"); expect(tableNames).toContain("rpc_request_results"); @@ -38,8 +75,17 @@ test("setup creates tables", async (context) => { test("getIntervals() empty", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: undefined, + toBlock: undefined, + } satisfies BlockFilter; + const intervals = await syncStore.getIntervals({ - filters: [context.sources[0].filter], + filters: [filter], }); expect(Array.from(intervals.values())[0]).toHaveLength(0); @@ -50,17 +96,26 @@ test("getIntervals() empty", async (context) => { test("getIntervals() returns intervals", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: undefined, + toBlock: undefined, + } satisfies BlockFilter; + await syncStore.insertIntervals({ intervals: [ { - filter: context.sources[0].filter, + filter, interval: [0, 4], }, ], }); const intervals = await syncStore.getIntervals({ - filters: [context.sources[0].filter], + filters: [filter], }); expect(Array.from(intervals.values())[0]).toHaveLength(1); @@ -72,10 +127,19 @@ test("getIntervals() returns intervals", async (context) => { test("getIntervals() merges intervals", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: undefined, + toBlock: undefined, + } satisfies BlockFilter; + await syncStore.insertIntervals({ intervals: [ { - filter: context.sources[0].filter, + filter, interval: [0, 4], }, ], @@ -84,13 +148,13 @@ test("getIntervals() merges intervals", async (context) => { await syncStore.insertIntervals({ intervals: [ { - filter: context.sources[0].filter, + filter, interval: [5, 8], }, ], }); const intervals = await syncStore.getIntervals({ - filters: [context.sources[0].filter], + filters: [filter], }); expect(Array.from(intervals.values())[0]).toHaveLength(1); @@ -102,10 +166,22 @@ test("getIntervals() merges intervals", async (context) => { test("getIntervals() adjacent intervals", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const filter = { + type: "log", + chainId: 1, + topic0: null, + topic1: null, + topic2: null, + topic3: null, + address: [zeroAddress], + fromBlock: undefined, + toBlock: undefined, + } satisfies LogFilter; + await syncStore.insertIntervals({ intervals: [ { - filter: context.sources[0].filter, + filter, interval: [0, 4], }, ], @@ -114,13 +190,13 @@ test("getIntervals() adjacent intervals", async (context) => { await syncStore.insertIntervals({ intervals: [ { - filter: { ...context.sources[0].filter, address: undefined }, + filter: { ...filter, address: undefined }, interval: [5, 8], }, ], }); const intervals = await syncStore.getIntervals({ - filters: [context.sources[0].filter], + filters: [filter], }); expect(Array.from(intervals.values())[0]).toHaveLength(1); @@ -129,200 +205,50 @@ test("getIntervals() adjacent intervals", async (context) => { await cleanup(); }); -test("getIntervals() handles log filter logic", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - 
await syncStore.insertIntervals({ - intervals: [ - { - filter: context.sources[0].filter, - interval: [0, 4], - }, - ], - }); - - let intervals = await syncStore.getIntervals({ - filters: [ - { - ...context.sources[0].filter, - includeTransactionReceipts: false, - }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(1); - expect(Array.from(intervals.values())[0]![0]).toStrictEqual([0, 4]); - - intervals = await syncStore.getIntervals({ - filters: [ - { ...context.sources[0].filter, address: context.factory.address }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(0); - - await cleanup(); -}); - -test("getIntervals() handles factory log filter logic", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - await syncStore.insertIntervals({ - intervals: [ - { - filter: context.sources[1].filter, - interval: [0, 4], - }, - ], - }); - - let intervals = await syncStore.getIntervals({ - filters: [ - { - ...context.sources[1].filter, - includeTransactionReceipts: false, - }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(1); - expect(Array.from(intervals.values())[0]![0]).toStrictEqual([0, 4]); - - intervals = await syncStore.getIntervals({ - filters: [ - { - ...context.sources[1].filter, - address: { - ...context.sources[1].filter.address, - childAddressLocation: "topic2", - }, - }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(0); - - await cleanup(); -}); - -test("getIntervals() handles trace filter logic", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - await syncStore.insertIntervals({ - intervals: [ - { - filter: context.sources[3].filter, - interval: [0, 4], - }, - ], - }); - - let intervals = await syncStore.getIntervals({ - filters: [context.sources[3].filter], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(1); - expect(Array.from(intervals.values())[0]![0]).toStrictEqual([0, 4]); - - intervals = await syncStore.getIntervals({ - filters: [ - { - ...context.sources[3].filter, - toAddress: [context.erc20.address], - }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(0); - - await cleanup(); -}); - -test("getIntervals() handles factory trace filter logic", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - await syncStore.insertIntervals({ - intervals: [ - { - filter: context.sources[2].filter, - interval: [0, 4], - }, - ], - }); - - let intervals = await syncStore.getIntervals({ - filters: [context.sources[2].filter], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(1); - expect(Array.from(intervals.values())[0]![0]).toStrictEqual([0, 4]); - - intervals = await syncStore.getIntervals({ - filters: [ - { - ...context.sources[2].filter, - toAddress: { - ...context.sources[2].filter.toAddress, - childAddressLocation: "topic2", - }, - }, - ], - }); - - expect(Array.from(intervals.values())[0]).toHaveLength(0); - - await cleanup(); -}); - -test("getIntervals() handles block filter logic", async (context) => { +test("getChildAddresses()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - await syncStore.getIntervals({ - filters: [context.sources[4].filter], - }); - - await syncStore.insertIntervals({ - intervals: [ - { - filter: context.sources[4].filter, - interval: [0, 4], - }, - ], - }); - - let intervals = await syncStore.getIntervals({ - filters: [context.sources[4].filter], - 
}); - - expect(Array.from(intervals.values())[0]).toHaveLength(1); - expect(Array.from(intervals.values())[0]![0]).toStrictEqual([0, 4]); - - intervals = await syncStore.getIntervals({ - filters: [{ ...context.sources[4].filter, interval: 69 }], + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployFactory({ sender: ALICE }); + const { result } = await createPair({ factory: address, sender: ALICE }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, }); - expect(Array.from(intervals.values())[0]).toHaveLength(0); - - await cleanup(); -}); - -test("getChildAddresses()", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0] }], + logs: [{ log: rpcLogs[0]! }], shouldUpdateCheckpoint: false, chainId: 1, }); + const filter = sources[0]!.filter as LogFilter; + const addresses = await syncStore.getChildAddresses({ - filter: context.sources[1].filter.address as Factory, + filter: filter.address, limit: 10, }); expect(addresses).toHaveLength(1); - expect(addresses[0]).toBe(context.factory.pair); + expect(addresses[0]).toBe(result); await cleanup(); }); @@ -330,8 +256,25 @@ test("getChildAddresses()", async (context) => { test("getChildAddresses() empty", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const { address } = await deployFactory({ sender: ALICE }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const filter = sources[0]!.filter as LogFilter; + const addresses = await syncStore.getChildAddresses({ - filter: context.sources[1].filter.address as Factory, + filter: filter.address, limit: 10, }); @@ -342,21 +285,44 @@ test("getChildAddresses() empty", async (context) => { test("filterChildAddresses()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployFactory({ sender: ALICE }); + const { result } = await createPair({ factory: address, sender: ALICE }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0] }], + logs: [{ log: rpcLogs[0]! 
}], shouldUpdateCheckpoint: false, chainId: 1, }); + const filter = sources[0]!.filter as LogFilter; + const addresses = await syncStore.filterChildAddresses({ - filter: context.sources[1].filter.address as Factory, - addresses: [ - context.erc20.address, - context.factory.address, - context.factory.pair, - ], + filter: filter.address, + addresses: [address, result, zeroAddress], }); expect(addresses.size).toBe(1); @@ -366,11 +332,28 @@ test("filterChildAddresses()", async (context) => { test("insertLogs()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], - shouldUpdateCheckpoint: true, + logs: [{ log: rpcLogs[0]! }], + shouldUpdateCheckpoint: false, chainId: 1, }); @@ -382,16 +365,34 @@ test("insertLogs()", async (context) => { test("insertLogs() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], - shouldUpdateCheckpoint: true, + logs: [{ log: rpcLogs[0]! }], + shouldUpdateCheckpoint: false, chainId: 1, }); + await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], - shouldUpdateCheckpoint: true, + logs: [{ log: rpcLogs[0]! 
}], + shouldUpdateCheckpoint: false, chainId: 1, }); @@ -403,10 +404,30 @@ test("insertLogs() with duplicates", async (context) => { test("insertLogs() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); @@ -414,11 +435,9 @@ test("insertLogs() creates checkpoint", async (context) => { const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); const checkpoint = decodeCheckpoint(logs[0]!.checkpoint!); - expect(checkpoint.blockTimestamp).toBe( - hexToNumber(rpcData.block3.block.timestamp), - ); + expect(checkpoint.blockTimestamp).toBe(hexToNumber(rpcBlock.timestamp)); expect(checkpoint.chainId).toBe(1n); - expect(checkpoint.blockNumber).toBe(3n); + expect(checkpoint.blockNumber).toBe(2n); expect(checkpoint.transactionIndex).toBe(0n); expect(checkpoint.eventType).toBe(5); expect(checkpoint.eventIndex).toBe(0n); @@ -428,10 +447,30 @@ test("insertLogs() creates checkpoint", async (context) => { test("insertLogs() upserts checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0] }], + logs: [{ log: rpcLogs[0]! }], shouldUpdateCheckpoint: false, chainId: 1, }); @@ -440,7 +479,7 @@ test("insertLogs() upserts checkpoint", async (context) => { expect(logs[0]!.checkpoint).toBe(null); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); @@ -449,7 +488,7 @@ test("insertLogs() upserts checkpoint", async (context) => { expect(logs[0]!.checkpoint).not.toBe(null); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0] }], + logs: [{ log: rpcLogs[0]! 
}], shouldUpdateCheckpoint: false, chainId: 1, }); @@ -462,9 +501,19 @@ test("insertLogs() upserts checkpoint", async (context) => { test("insertBlocks()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await testClient.mine({ blocks: 1 }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const blocks = await database.qb.sync .selectFrom("blocks") @@ -477,10 +526,20 @@ test("insertBlocks()", async (context) => { test("insertBlocks() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await testClient.mine({ blocks: 1 }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const blocks = await database.qb.sync .selectFrom("blocks") @@ -493,24 +552,29 @@ test("insertBlocks() with duplicates", async (context) => { test("insertBlocks() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ - blocks: [rpcData.block3.block], - chainId: 1, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await testClient.mine({ blocks: 1 }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + const blocks = await database.qb.sync .selectFrom("blocks") .selectAll() .execute(); const checkpoint = decodeCheckpoint(blocks[0]!.checkpoint!); - expect(checkpoint.blockTimestamp).toBe( - hexToNumber(rpcData.block3.block.timestamp), - ); + expect(checkpoint.blockTimestamp).toBe(hexToNumber(rpcBlock.timestamp)); expect(checkpoint.chainId).toBe(1n); - expect(checkpoint.blockNumber).toBe(3n); + expect(checkpoint.blockNumber).toBe(1n); expect(checkpoint.transactionIndex).toBe(maxCheckpoint.transactionIndex); expect(checkpoint.eventType).toBe(5); expect(checkpoint.eventIndex).toBe(0n); @@ -520,16 +584,27 @@ test("insertBlocks() creates checkpoint", async (context) => { test("hasBlock()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await testClient.mine({ blocks: 1 }); + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + let block = await syncStore.hasBlock({ - hash: 
rpcData.block3.block.hash, + hash: rpcBlock.hash, }); expect(block).toBe(true); block = await syncStore.hasBlock({ - hash: rpcData.block2.block.hash, + hash: zeroHash, }); expect(block).toBe(false); @@ -538,10 +613,26 @@ test("hasBlock()", async (context) => { test("insertTransactions()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); @@ -556,14 +647,30 @@ test("insertTransactions()", async (context) => { test("insertTransactions() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); @@ -578,19 +685,36 @@ test("insertTransactions() with duplicates", async (context) => { test("hasTransaction()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + let transaction = await syncStore.hasTransaction({ - hash: rpcData.block3.transactions[0].hash, + hash, }); expect(transaction).toBe(true); transaction = await syncStore.hasTransaction({ - hash: rpcData.block2.transactions[0].hash, + hash: zeroHash, }); expect(transaction).toBe(false); @@ -599,10 +723,27 @@ test("hasTransaction()", async (context) => { test("insertTransactionReceipts()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: 
ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + hash, + }); await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); @@ -617,14 +758,31 @@ test("insertTransactionReceipts()", async (context) => { test("insertTransactionReceipts() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + hash, + }); await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); @@ -639,38 +797,101 @@ test("insertTransactionReceipts() with duplicates", async (context) => { test("hasTransactionReceipt()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + hash, + }); await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); + let transaction = await syncStore.hasTransactionReceipt({ - hash: rpcData.block3.transactionReceipts[0].transactionHash, + hash: rpcTransactionReceipt.transactionHash, }); expect(transaction).toBe(true); transaction = await syncStore.hasTransactionReceipt({ - hash: rpcData.block2.transactionReceipts[0].transactionHash, + hash: zeroHash, }); expect(transaction).toBe(false); await cleanup(); }); -test("insertCallTraces()", async (context) => { +test("insertTraces()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const trace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, 
+ }, + transactionHash: hash, + } satisfies SyncTrace; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + await syncStore.insertTraces({ + traces: [ + { + trace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); const traces = await database.qb.sync - .selectFrom("callTraces") + .selectFrom("traces") .selectAll() .execute(); expect(traces).toHaveLength(1); @@ -678,28 +899,71 @@ test("insertCallTraces()", async (context) => { await cleanup(); }); -test("insertCallTraces() creates checkpoint", async (context) => { +test("insertTraces() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const trace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + + await syncStore.insertTraces({ + traces: [ + { + trace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); const traces = await database.qb.sync - .selectFrom("callTraces") + .selectFrom("traces") .selectAll() .execute(); const checkpoint = decodeCheckpoint(traces[0]!.checkpoint!); - expect(checkpoint.blockTimestamp).toBe( - hexToNumber(rpcData.block3.block.timestamp), - ); + expect(checkpoint.blockTimestamp).toBe(hexToNumber(rpcBlock.timestamp)); expect(checkpoint.chainId).toBe(1n); - expect(checkpoint.blockNumber).toBe(3n); + expect(checkpoint.blockNumber).toBe(1n); expect(checkpoint.transactionIndex).toBe(0n); expect(checkpoint.eventType).toBe(7); expect(checkpoint.eventIndex).toBe(0n); @@ -707,25 +971,74 @@ test("insertCallTraces() creates checkpoint", async (context) => { await cleanup(); }); -test("insertCallTraces() with duplicates", async (context) => { +test("insertTraces() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, - ], - chainId: 1, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const trace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: 
erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, }); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, + + await syncStore.insertTraces({ + traces: [ + { + trace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, + ], + chainId: 1, + }); + await syncStore.insertTraces({ + traces: [ + { + trace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); const traces = await database.qb.sync - .selectFrom("callTraces") + .selectFrom("traces") .selectAll() .execute(); expect(traces).toHaveLength(1); @@ -735,27 +1048,51 @@ test("insertCallTraces() with duplicates", async (context) => { test("getEvents() returns events", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const filter = { type: "log", chainId: 1, address: undefined, - topics: [null], - fromBlock: 0, - toBlock: 5, - includeTransactionReceipts: false, + topic0: null, + topic1: null, + topic2: null, + topic3: null, + fromBlock: undefined, + toBlock: undefined, + // includeTransactionReceipts: false, } satisfies LogFilter; const { events } = await syncStore.getEvents({ @@ -772,67 +1109,169 @@ test("getEvents() returns events", async (context) => { test("getEvents() handles log filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [ - { log: rpcData.block2.logs[0], block: rpcData.block2.block }, - { log: rpcData.block2.logs[1], block: rpcData.block2.block }, - ], - shouldUpdateCheckpoint: true, - chainId: 1, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + // noisy data + const { address: factory } = await deployFactory({ sender: ALICE }); + await createPair({ factory, sender: ALICE }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: 
{ + ponderDir: "", + rootDir: "", + }, + }); + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, }); - await syncStore.insertBlocks({ blocks: [rpcData.block2.block], chainId: 1 }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + let rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + + // noisy data + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 4, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 4, + toBlock: 4, + }); + syncStore.insertLogs({ + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], + shouldUpdateCheckpoint: true, chainId: 1, }); const { events } = await syncStore.getEvents({ - filters: [context.sources[0].filter], + filters: [sources[0]!.filter], from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, }); - expect(events).toHaveLength(2); + expect(events).toHaveLength(1); await cleanup(); }); test("getEvents() handles log factory", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], - shouldUpdateCheckpoint: true, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address: factory } = await deployFactory({ sender: ALICE }); + const { result: pair } = await createPair({ factory, sender: ALICE }); + await swapPair({ + pair, + sender: ALICE, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address: factory, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // factory + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + + let rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block4.logs[0], block: rpcData.block4.block }], - shouldUpdateCheckpoint: true, + logs: [{ log: rpcLogs[0]! 
}], + shouldUpdateCheckpoint: false, chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block4.block], chainId: 1 }); + + // pair + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block4.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 3, + toBlock: 3, + }); + await syncStore.insertLogs({ + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], + shouldUpdateCheckpoint: true, chainId: 1, }); const { events } = await syncStore.getEvents({ - filters: [context.sources[1].filter], + filters: [sources[0]!.filter], from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, @@ -845,37 +1284,135 @@ test("getEvents() handles log factory", async (context) => { test("getEvents() handles multiple log factories", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], - shouldUpdateCheckpoint: true, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address: factory } = await deployFactory({ sender: ALICE }); + const { result: pair } = await createPair({ factory, sender: ALICE }); + await swapPair({ + pair, + sender: ALICE, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address: factory, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // factory + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + + let rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block4.logs[0], block: rpcData.block4.block }], - shouldUpdateCheckpoint: true, + logs: [{ log: rpcLogs[0]! 
}], + shouldUpdateCheckpoint: false, chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block4.block], chainId: 1 }); + + // pair + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block4.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); - context.sources[1].filter = { - ...context.sources[1].filter, - address: { - ...context.sources[1].filter.address, - address: [ - context.sources[1].filter.address.address as Address, - context.sources[1].filter.address.address as Address, - ], + rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 3, + toBlock: 3, + }); + await syncStore.insertLogs({ + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], + shouldUpdateCheckpoint: true, + chainId: 1, + }); + + const filter = sources[0]!.filter as LogFilter; + + filter.address.address = [ + filter.address.address as Address, + filter.address.address as Address, + zeroAddress, + ]; + + const { events } = await syncStore.getEvents({ + filters: [filter], + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + expect(events).toHaveLength(1); + + await cleanup(); +}); + +test("getEvents() handles block filter logic", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + await testClient.mine({ blocks: 2 }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 2, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", }, - } satisfies LogFilter; + }); + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const { events } = await syncStore.getEvents({ - filters: [context.sources[1].filter], + filters: [sources[0]!.filter], from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, @@ -888,26 +1425,139 @@ test("getEvents() handles multiple log factories", async (context) => { test("getEvents() handles trace filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ 
blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + + const { events } = await syncStore.getEvents({ + filters: sources.map((source) => source.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + expect(events).toHaveLength(1); + + await cleanup(); +}); + +test("getEvents() handles transaction filter logic", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + + const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcReceipt], chainId: 1, }); const { events } = await syncStore.getEvents({ - filters: [context.sources[3].filter], + filters: sources.map((source) => source.filter), from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, @@ -918,22 +1568,87 @@ test("getEvents() handles trace filter logic", async (context) => { await cleanup(); }); -test("getEvents() handles block filter logic", async (context) => { +test("getEvents() handles transfer filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ blocks: [rpcData.block2.block], chainId: 1 }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); - await syncStore.insertBlocks({ blocks: [rpcData.block4.block], chainId: 1 }); - await syncStore.insertBlocks({ blocks: [rpcData.block5.block], chainId: 1 }); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: 
ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + + await syncStore.insertTransactionReceipts({ + transactionReceipts: [rpcReceipt], + chainId: 1, + }); + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: BOB, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + output: "0x0", + value: rpcBlock.transactions[0]!.value, + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, + ], + chainId: 1, + }); const { events } = await syncStore.getEvents({ - filters: [context.sources[4].filter], + filters: sources.map((source) => source.filter), from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, }); + // transaction:from and transfer:from expect(events).toHaveLength(2); await cleanup(); @@ -941,42 +1656,58 @@ test("getEvents() handles block filter logic", async (context) => { test("getEvents() handles block bounds", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [ - { log: rpcData.block2.logs[0], block: rpcData.block2.block }, - { log: rpcData.block2.logs[1], block: rpcData.block2.block }, - ], - shouldUpdateCheckpoint: true, - chainId: 1, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, }); - await syncStore.insertBlocks({ blocks: [rpcData.block2.block], chainId: 1 }); - await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, - chainId: 1, + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); - await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, - chainId: 1, - }); - const filter = context.sources[0].filter; + const filter = sources[0]!.filter as LogFilter; filter.toBlock = 1; const { events } = await 
syncStore.getEvents({ - filters: [filter], + filters: [sources[0]!.filter], from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, @@ -989,28 +1720,39 @@ test("getEvents() handles block bounds", async (context) => { test("getEvents() pagination", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertLogs({ - logs: [ - { log: rpcData.block2.logs[0], block: rpcData.block2.block }, - { log: rpcData.block2.logs[1], block: rpcData.block2.block }, - ], - shouldUpdateCheckpoint: true, - chainId: 1, + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, }); - await syncStore.insertBlocks({ blocks: [rpcData.block2.block], chainId: 1 }); - await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, - chainId: 1, + + await testClient.mine({ blocks: 2 }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, }); - await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, - chainId: 1, + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const { events, cursor } = await syncStore.getEvents({ - filters: [context.sources[0].filter], + filters: [sources[0]!.filter], from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 1, @@ -1019,7 +1761,7 @@ test("getEvents() pagination", async (context) => { expect(events).toHaveLength(1); const { events: events2 } = await syncStore.getEvents({ - filters: [context.sources[0].filter], + filters: [sources[0]!.filter], from: cursor, to: encodeCheckpoint(maxCheckpoint), limit: 1, @@ -1075,50 +1817,133 @@ test("pruneRpcRequestResult", async (context) => { test("pruneByChain deletes blocks, logs, traces, transactions", async (context) => { const { syncStore, database, cleanup } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - await syncStore.insertBlocks({ blocks: [rpcData.block2.block], chainId: 1 }); + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash: hash1 } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const { hash: hash2 } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + // block 2 (first mint) + + let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + let rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [ - { log: rpcData.block2.logs[0], block: rpcData.block2.block }, - { log: rpcData.block2.logs[1], block: rpcData.block2.block }, - ], + logs: [{ log: rpcLogs[0]!, block: 
rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); - await syncStore.insertTransactions({ - transactions: rpcData.block2.transactions, - chainId: 1, + + let rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + hash: hash1, }); + await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block2.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block2.callTraces[0], block: rpcData.block2.block }, - { callTrace: rpcData.block2.callTraces[1], block: rpcData.block2.block }, + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash1, + } satisfies SyncTrace; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); - await syncStore.insertBlocks({ blocks: [rpcData.block3.block], chainId: 1 }); + // block 3 (second mint) + + rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 3, + toBlock: 3, + }); await syncStore.insertLogs({ - logs: [{ log: rpcData.block3.logs[0], block: rpcData.block3.block }], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); - await syncStore.insertTransactions({ - transactions: rpcData.block3.transactions, - chainId: 1, + + rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + hash: hash1, }); + await syncStore.insertTransactionReceipts({ - transactionReceipts: rpcData.block3.transactionReceipts, + transactionReceipts: [rpcTransactionReceipt], chainId: 1, }); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, + + rpcTrace.transactionHash = hash2; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); @@ -1130,8 +1955,8 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) .selectFrom("blocks") .selectAll() .execute(); - const callTraces = await database.qb.sync - .selectFrom("callTraces") + const traces = await database.qb.sync + .selectFrom("traces") .selectAll() .execute(); const transactions = await database.qb.sync @@ -1143,11 +1968,11 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) .selectAll() .execute(); - expect(logs).toHaveLength(2); + expect(logs).toHaveLength(1); expect(blocks).toHaveLength(1); - expect(callTraces).toHaveLength(2); - expect(transactions).toHaveLength(2); - expect(transactionReceipts).toHaveLength(2); + expect(traces).toHaveLength(1); + expect(transactions).toHaveLength(1); + expect(transactionReceipts).toHaveLength(1); await cleanup(); }); diff --git a/packages/core/src/sync-store/index.ts b/packages/core/src/sync-store/index.ts index 
602feaed1..ad97ecc39 100644 --- a/packages/core/src/sync-store/index.ts +++ b/packages/core/src/sync-store/index.ts @@ -4,32 +4,27 @@ import type { RawEvent } from "@/sync/events.js"; import { getFragmentIds } from "@/sync/fragments.js"; import { type BlockFilter, - type CallTraceFilter, type Factory, type Filter, type LogFactory, type LogFilter, + type TraceFilter, + type TransactionFilter, + type TransferFilter, isAddressFactory, } from "@/sync/source.js"; -import type { CallTrace, Log, TransactionReceipt } from "@/types/eth.js"; +import type { Log, Trace } from "@/types/eth.js"; import type { LightBlock, SyncBlock, - SyncCallTrace, SyncLog, + SyncTrace, SyncTransaction, SyncTransactionReceipt, } from "@/types/sync.js"; import type { NonNull } from "@/types/utils.js"; -import { EVENT_TYPES, encodeCheckpoint } from "@/utils/checkpoint.js"; import { type Interval, intervalIntersectionMany } from "@/utils/interval.js"; -import { - type Insertable, - type Kysely, - type SelectQueryBuilder, - sql as ksql, - sql, -} from "kysely"; +import { type Kysely, type SelectQueryBuilder, sql as ksql, sql } from "kysely"; import type { InsertObject } from "kysely"; import { type Address, @@ -37,13 +32,12 @@ import { type Hex, checksumAddress, hexToBigInt, - hexToNumber, } from "viem"; import { type PonderSyncSchema, encodeBlock, - encodeCallTrace, encodeLog, + encodeTrace, encodeTransaction, encodeTransactionReceipt, } from "./encoding.js"; @@ -75,7 +69,7 @@ export type SyncStore = { /** Return true if the block receipt is present in the database. */ hasBlock(args: { hash: Hash }): Promise; insertTransactions(args: { - transactions: SyncTransaction[]; + transactions: { transaction: SyncTransaction; block: SyncBlock }[]; chainId: number; }): Promise; /** Return true if the transaction is present in the database. */ @@ -86,8 +80,12 @@ export type SyncStore = { }): Promise; /** Return true if the transaction receipt is present in the database. */ hasTransactionReceipt(args: { hash: Hash }): Promise; - insertCallTraces(args: { - callTraces: { callTrace: SyncCallTrace; block: SyncBlock }[]; + insertTraces(args: { + traces: { + trace: SyncTrace; + block: SyncBlock; + transaction: SyncTransaction; + }[]; chainId: number; }): Promise; /** Returns an ordered list of events based on the `filters` and pagination arguments. */ @@ -357,7 +355,8 @@ export const createSyncStore = ({ common.options.databaseMaxQueryParameters / Object.keys( encodeTransaction({ - transaction: transactions[0]!, + transaction: transactions[0]!.transaction, + block: transactions[0]!.block, chainId, }), ).length, @@ -369,8 +368,8 @@ export const createSyncStore = ({ .values( transactions .slice(i, i + batchSize) - .map((transaction) => - encodeTransaction({ transaction, chainId }), + .map(({ transaction, block }) => + encodeTransaction({ transaction, block, chainId }), ), ) .onConflict((oc) => oc.column("hash").doNothing()) @@ -429,89 +428,48 @@ export const createSyncStore = ({ .executeTakeFirst() .then((result) => result !== undefined); }), - insertCallTraces: async ({ callTraces, chainId }) => { - if (callTraces.length === 0) return; - await db.wrap({ method: "insertCallTrace" }, async () => { - // Delete existing traces with the same `transactionHash`. Then, calculate "callTraces.checkpoint" - // based on the ordering of "callTraces.traceAddress" and add all traces to "callTraces" table. 
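
Note: the replacement `insertTraces` drops the old delete-and-resort step (checkpoints are no longer recomputed here from `traceAddress` ordering) and reuses the same batching pattern as `insertTransactions`, sizing each batch by the database's bind-parameter limit. A minimal sketch of that pattern follows; the `insertInBatches`, `encodeRow`, and `insert` names are illustrative only and not part of the codebase, and the real queries also add an ON CONFLICT ... DO NOTHING clause.

```ts
// Sketch of the parameter-limited batch insert used by insertTransactions and insertTraces.
// `maxQueryParameters` stands in for common.options.databaseMaxQueryParameters and
// `encodeRow` for the encode* helpers in sync-store/encoding.ts.
async function insertInBatches<T>(
  rows: T[],
  encodeRow: (row: T) => Record<string, unknown>,
  maxQueryParameters: number,
  insert: (encodedRows: Record<string, unknown>[]) => Promise<void>,
): Promise<void> {
  if (rows.length === 0) return;
  // Each column of an encoded row consumes one bind parameter, so the batch size is
  // the parameter budget divided by the column count of a single encoded row.
  const batchSize = Math.floor(
    maxQueryParameters / Object.keys(encodeRow(rows[0]!)).length,
  );
  for (let i = 0; i < rows.length; i += batchSize) {
    await insert(rows.slice(i, i + batchSize).map(encodeRow));
  }
}
```
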
- const traceByTransactionHash: { - [transactionHash: Hex]: { traces: SyncCallTrace[]; block: SyncBlock }; - } = {}; + insertTraces: async ({ traces, chainId }) => { + if (traces.length === 0) return; + await db.wrap({ method: "insertTraces" }, async () => { + // Calculate `batchSize` based on how many parameters the + // input will have + const batchSize = Math.floor( + common.options.databaseMaxQueryParameters / + Object.keys( + encodeTrace({ + trace: traces[0]!.trace.trace, + block: traces[0]!.block, + transaction: traces[0]!.transaction, + chainId, + }), + ).length, + ); - for (const { callTrace, block } of callTraces) { - if (traceByTransactionHash[callTrace.transactionHash] === undefined) { - traceByTransactionHash[callTrace.transactionHash] = { - traces: [], - block, - }; - } - traceByTransactionHash[callTrace.transactionHash]!.traces.push( - callTrace, - ); + for (let i = 0; i < traces.length; i += batchSize) { + await db + .insertInto("traces") + .values( + traces + .slice(i, i + batchSize) + .map(({ trace, block, transaction }) => + encodeTrace({ + trace: trace.trace, + block, + transaction, + chainId, + }), + ), + ) + .onConflict((oc) => oc.column("id").doNothing()) + .execute(); } - - const values: Insertable[] = []; - - await db.transaction().execute(async (tx) => { - for (const transactionHash of Object.keys(traceByTransactionHash)) { - const block = traceByTransactionHash[transactionHash as Hex]!.block; - const traces = await tx - .deleteFrom("callTraces") - .returningAll() - .where("transactionHash", "=", transactionHash as Hex) - .where("chainId", "=", chainId) - .execute(); - - traces.push( - // @ts-ignore - ...traceByTransactionHash[transactionHash as Hex]!.traces.map( - (trace) => encodeCallTrace({ trace, chainId }), - ), - ); - - // Use lexographical sort of stringified `traceAddress`. - traces.sort((a, b) => { - return a.traceAddress < b.traceAddress ? -1 : 1; - }); - - for (let i = 0; i < traces.length; i++) { - const trace = traces[i]!; - - const checkpoint = encodeCheckpoint({ - blockTimestamp: hexToNumber(block.timestamp), - chainId: BigInt(chainId), - blockNumber: hexToBigInt(block.number), - transactionIndex: BigInt(trace.transactionPosition), - eventType: EVENT_TYPES.callTraces, - eventIndex: BigInt(i), - }); - trace.checkpoint = checkpoint; - values.push(trace); - } - } - - // Calculate `batchSize` based on how many parameters the - // input will have - const batchSize = Math.floor( - common.options.databaseMaxQueryParameters / - Object.keys(values[0]!).length, - ); - - for (let i = 0; i < values.length; i += batchSize) { - await tx - .insertInto("callTraces") - .values(values.slice(i, i + batchSize)) - .onConflict((oc) => oc.column("id").doNothing()) - .execute(); - } - }); }); }, getEvents: async ({ filters, from, to, limit }) => { const addressSQL = ( qb: SelectQueryBuilder< PonderSyncSchema, - "logs" | "blocks" | "callTraces", + "logs" | "blocks" | "traces", {} >, address: LogFilter["address"], @@ -544,14 +502,13 @@ export const createSyncStore = ({ "blockHash", "transactionHash", "id as logId", - ksql`null`.as("callTraceId"), + ksql`null`.as("traceId"), ]) .where("chainId", "=", filter.chainId) - .$if(filter.topics !== undefined, (qb) => { - for (const idx_ of [0, 1, 2, 3]) { - const idx = idx_ as 0 | 1 | 2 | 3; + .$call((qb) => { + for (const idx of [0, 1, 2, 3] as const) { // If it's an array of length 1, collapse it. - const raw = filter.topics![idx] ?? null; + const raw = filter[`topic${idx}`] ?? 
null; if (raw === null) continue; const topic = Array.isArray(raw) && raw.length === 1 ? raw[0]! : raw; @@ -566,64 +523,145 @@ export const createSyncStore = ({ return qb; }) .$call((qb) => addressSQL(qb as any, filter.address, "address")) - .where("blockNumber", ">=", filter.fromBlock.toString()) + .$if(filter.fromBlock !== undefined, (qb) => + qb.where("blockNumber", ">=", filter.fromBlock!.toString()), + ) .$if(filter.toBlock !== undefined, (qb) => qb.where("blockNumber", "<=", filter.toBlock!.toString()), ); - const callTraceSQL = ( - filter: CallTraceFilter, + const blockSQL = ( + filter: BlockFilter, db: Kysely, index: number, ) => db - .selectFrom("callTraces") + .selectFrom("blocks") + .select([ + ksql.raw(`'${index}'`).as("filterIndex"), + "checkpoint", + "chainId", + "hash as blockHash", + ksql`null`.as("transactionHash"), + ksql`null`.as("logId"), + ksql`null`.as("traceId"), + ]) + .where("chainId", "=", filter.chainId) + .$if(filter !== undefined && filter.interval !== undefined, (qb) => + qb.where(ksql`(number - ${filter.offset}) % ${filter.interval} = 0`), + ) + .$if(filter.fromBlock !== undefined, (qb) => + qb.where("number", ">=", filter.fromBlock!.toString()), + ) + .$if(filter.toBlock !== undefined, (qb) => + qb.where("number", "<=", filter.toBlock!.toString()), + ); + + const transactionSQL = ( + filter: TransactionFilter, + db: Kysely, + index: number, + ) => + db + .selectFrom("transactions") .select([ ksql.raw(`'${index}'`).as("filterIndex"), "checkpoint", "chainId", "blockHash", - "transactionHash", + "hash as transactionHash", ksql`null`.as("logId"), - "id as callTraceId", + ksql`null`.as("traceId"), ]) .where("chainId", "=", filter.chainId) - .where((eb) => - eb.or( - filter.functionSelectors.map((fs) => - eb("callTraces.functionSelector", "=", fs), - ), + .$call((qb) => addressSQL(qb as any, filter.fromAddress, "from")) + .$call((qb) => addressSQL(qb as any, filter.toAddress, "to")) + .$if(filter.includeReverted === false, (qb) => + qb.where( + db + .selectFrom("transactionReceipts") + .select("status") + .where( + "transactionReceipts.transactionHash", + "=", + sql.ref("transactions.hash"), + ), + "=", + "0x1", ), ) - .where(ksql`${ksql.ref("callTraces.error")} IS NULL`) + .$if(filter.fromBlock !== undefined, (qb) => + qb.where("blockNumber", ">=", filter.fromBlock!.toString()), + ) + .$if(filter.toBlock !== undefined, (qb) => + qb.where("blockNumber", "<=", filter.toBlock!.toString()), + ); + + const transferSQL = ( + filter: TransferFilter, + db: Kysely, + index: number, + ) => + db + .selectFrom("traces") + .select([ + ksql.raw(`'${index}'`).as("filterIndex"), + "checkpoint", + "chainId", + "blockHash", + "transactionHash", + ksql`null`.as("logId"), + "id as traceId", + ]) + .where("chainId", "=", filter.chainId) .$call((qb) => addressSQL(qb as any, filter.fromAddress, "from")) - .$call((qb) => addressSQL(qb, filter.toAddress, "to")) - .where("blockNumber", ">=", filter.fromBlock.toString()) + .$call((qb) => addressSQL(qb as any, filter.toAddress, "to")) + .where("value", ">", "0") + .$if(filter.includeReverted === false, (qb) => + qb.where("isReverted", "=", 0), + ) + .$if(filter.fromBlock !== undefined, (qb) => + qb.where("blockNumber", ">=", filter.fromBlock!.toString()), + ) .$if(filter.toBlock !== undefined, (qb) => qb.where("blockNumber", "<=", filter.toBlock!.toString()), ); - const blockSQL = ( - filter: BlockFilter, + const traceSQL = ( + filter: TraceFilter, db: Kysely, index: number, ) => db - .selectFrom("blocks") + .selectFrom("traces") .select([ 
ksql.raw(`'${index}'`).as("filterIndex"), "checkpoint", "chainId", - "hash as blockHash", - ksql`null`.as("transactionHash"), + "blockHash", + "transactionHash", ksql`null`.as("logId"), - ksql`null`.as("callTraceId"), + "id as traceId", ]) .where("chainId", "=", filter.chainId) - .$if(filter !== undefined && filter.interval !== undefined, (qb) => - qb.where(ksql`(number - ${filter.offset}) % ${filter.interval} = 0`), + .$call((qb) => addressSQL(qb as any, filter.fromAddress, "from")) + .$call((qb) => addressSQL(qb as any, filter.toAddress, "to")) + .$if(filter.includeReverted === false, (qb) => + qb.where("isReverted", "=", 0), + ) + .$if(filter.callType !== undefined, (qb) => + qb.where("type", "=", filter.callType!), + ) + .$if(filter.functionSelector !== undefined, (qb) => { + if (Array.isArray(filter.functionSelector)) { + return qb.where("functionSelector", "in", filter.functionSelector!); + } else { + return qb.where("functionSelector", "=", filter.functionSelector!); + } + }) + .$if(filter.fromBlock !== undefined, (qb) => + qb.where("number", ">=", filter.fromBlock!.toString()), ) - .where("number", ">=", filter.fromBlock.toString()) .$if(filter.toBlock !== undefined, (qb) => qb.where("number", "<=", filter.toBlock!.toString()), ); @@ -639,15 +677,15 @@ export const createSyncStore = ({ let query: | SelectQueryBuilder< PonderSyncSchema, - "logs" | "callTraces" | "blocks", + "logs" | "blocks" | "traces" | "transactions", { filterIndex: number; checkpoint: string; chainId: number; + logId: string; blockHash: string; transactionHash: string; - logId: string; - callTraceId: string; + traceId: string; } > | undefined; @@ -658,9 +696,13 @@ export const createSyncStore = ({ const _query = filter.type === "log" ? logSQL(filter, db, i) - : filter.type === "callTrace" - ? callTraceSQL(filter, db, i) - : blockSQL(filter, db, i); + : filter.type === "block" + ? blockSQL(filter, db, i) + : filter.type === "transaction" + ? transactionSQL(filter, db, i) + : filter.type === "transfer" + ? transferSQL(filter, db, i) + : traceSQL(filter, db, i); // @ts-ignore query = query === undefined ? 
_query : query.unionAll(_query); @@ -736,23 +778,21 @@ export const createSyncStore = ({ "transactions.value as tx_value", "transactions.v as tx_v", ]) - .leftJoin("callTraces", "callTraces.id", "event.callTraceId") + .leftJoin("traces", "traces.id", "event.traceId") .select([ - "callTraces.id as callTrace_id", - "callTraces.callType as callTrace_callType", - "callTraces.from as callTrace_from", - "callTraces.gas as callTrace_gas", - "callTraces.input as callTrace_input", - "callTraces.to as callTrace_to", - "callTraces.value as callTrace_value", - "callTraces.blockHash as callTrace_blockHash", - "callTraces.blockNumber as callTrace_blockNumber", - "callTraces.gasUsed as callTrace_gasUsed", - "callTraces.output as callTrace_output", - "callTraces.subtraces as callTrace_subtraces", - "callTraces.traceAddress as callTrace_traceAddress", - "callTraces.transactionHash as callTrace_transactionHash", - "callTraces.transactionPosition as callTrace_transactionPosition", + "traces.id as trace_id", + "traces.type as trace_callType", + "traces.from as trace_from", + "traces.to as trace_to", + "traces.gas as trace_gas", + "traces.gasUsed as trace_gasUsed", + "traces.input as trace_input", + "traces.output as trace_output", + "traces.error as trace_error", + "traces.revertReason as trace_revertReason", + "traces.value as trace_value", + "traces.index as trace_index", + "traces.subcalls as trace_subcalls", ]) .leftJoin( "transactionReceipts", @@ -767,7 +807,6 @@ export const createSyncStore = ({ "transactionReceipts.effectiveGasPrice as txr_effectiveGasPrice", "transactionReceipts.from as txr_from", "transactionReceipts.gasUsed as txr_gasUsed", - "transactionReceipts.logs as txr_logs", "transactionReceipts.logsBloom as txr_logsBloom", "transactionReceipts.status as txr_status", "transactionReceipts.to as txr_to", @@ -794,10 +833,7 @@ export const createSyncStore = ({ const hasLog = row.log_id !== null; const hasTransaction = row.tx_hash !== null; - const hasCallTrace = row.callTrace_id !== null; - const hasTransactionReceipt = - (filter.type === "log" || filter.type === "callTrace") && - filter.includeTransactionReceipts; + const hasTrace = row.trace_id !== null; return { chainId: filter.chainId, @@ -900,75 +936,72 @@ export const createSyncStore = ({ }), } : undefined, - trace: hasCallTrace - ? { - id: row.callTrace_id, - from: checksumAddress(row.callTrace_from), - to: checksumAddress(row.callTrace_to), - gas: BigInt(row.callTrace_gas), - value: BigInt(row.callTrace_value), - input: row.callTrace_input, - output: row.callTrace_output, - gasUsed: BigInt(row.callTrace_gasUsed), - subtraces: row.callTrace_subtraces, - traceAddress: JSON.parse(row.callTrace_traceAddress), - blockHash: row.callTrace_blockHash, - blockNumber: BigInt(row.callTrace_blockNumber), - transactionHash: row.callTrace_transactionHash, - transactionIndex: row.callTrace_transactionPosition, - callType: row.callTrace_callType as CallTrace["callType"], - } - : undefined, - transactionReceipt: hasTransactionReceipt + trace: hasTrace ? { - blockHash: row.txr_blockHash, - blockNumber: BigInt(row.txr_blockNumber), - contractAddress: row.txr_contractAddress - ? 
checksumAddress(row.txr_contractAddress) - : null, - cumulativeGasUsed: BigInt(row.txr_cumulativeGasUsed), - effectiveGasPrice: BigInt(row.txr_effectiveGasPrice), - from: checksumAddress(row.txr_from), - gasUsed: BigInt(row.txr_gasUsed), - logs: JSON.parse(row.txr_logs).map((log: SyncLog) => ({ - id: `${log.blockHash}-${log.logIndex}`, - address: checksumAddress(log.address), - blockHash: log.blockHash, - blockNumber: hexToBigInt(log.blockNumber), - data: log.data, - logIndex: hexToNumber(log.logIndex), - removed: false, - topics: [ - log.topics[0] ?? null, - log.topics[1] ?? null, - log.topics[2] ?? null, - log.topics[3] ?? null, - ].filter((t): t is Hex => t !== null) as [Hex, ...Hex[]] | [], - transactionHash: log.transactionHash, - transactionIndex: hexToNumber(log.transactionIndex), - })), - logsBloom: row.txr_logsBloom, - status: - row.txr_status === "0x1" - ? "success" - : row.txr_status === "0x0" - ? "reverted" - : (row.txr_status as TransactionReceipt["status"]), - to: row.txr_to ? checksumAddress(row.txr_to) : null, - transactionHash: row.txr_transactionHash, - transactionIndex: Number(row.txr_transactionIndex), - type: - row.txr_type === "0x0" - ? "legacy" - : row.txr_type === "0x1" - ? "eip2930" - : row.tx_type === "0x2" - ? "eip1559" - : row.tx_type === "0x7e" - ? "deposit" - : row.tx_type, + id: row.trace_id, + type: row.trace_callType as Trace["type"], + from: checksumAddress(row.trace_from), + to: checksumAddress(row.trace_to), + gas: BigInt(row.trace_gas), + gasUsed: BigInt(row.trace_gasUsed), + input: row.trace_input, + output: row.trace_output, + value: BigInt(row.trace_value), + traceIndex: Number(row.trace_index), + subcalls: Number(row.trace_subcalls), } : undefined, + transactionReceipt: undefined, + // hasTransactionReceipt + // ? { + // blockHash: row.txr_blockHash, + // blockNumber: BigInt(row.txr_blockNumber), + // contractAddress: row.txr_contractAddress + // ? checksumAddress(row.txr_contractAddress) + // : null, + // cumulativeGasUsed: BigInt(row.txr_cumulativeGasUsed), + // effectiveGasPrice: BigInt(row.txr_effectiveGasPrice), + // from: checksumAddress(row.txr_from), + // gasUsed: BigInt(row.txr_gasUsed), + // logs: JSON.parse(row.txr_logs).map((log: SyncLog) => ({ + // id: `${log.blockHash}-${log.logIndex}`, + // address: checksumAddress(log.address), + // blockHash: log.blockHash, + // blockNumber: hexToBigInt(log.blockNumber), + // data: log.data, + // logIndex: hexToNumber(log.logIndex), + // removed: false, + // topics: [ + // log.topics[0] ?? null, + // log.topics[1] ?? null, + // log.topics[2] ?? null, + // log.topics[3] ?? null, + // ].filter((t): t is Hex => t !== null) as [Hex, ...Hex[]] | [], + // transactionHash: log.transactionHash, + // transactionIndex: hexToNumber(log.transactionIndex), + // })), + // logsBloom: row.txr_logsBloom, + // status: + // row.txr_status === "0x1" + // ? "success" + // : row.txr_status === "0x0" + // ? "reverted" + // : (row.txr_status as TransactionReceipt["status"]), + // to: row.txr_to ? checksumAddress(row.txr_to) : null, + // transactionHash: row.txr_transactionHash, + // transactionIndex: Number(row.txr_transactionIndex), + // type: + // row.txr_type === "0x0" + // ? "legacy" + // : row.txr_type === "0x1" + // ? "eip2930" + // : row.tx_type === "0x2" + // ? "eip1559" + // : row.tx_type === "0x7e" + // ? 
"deposit" + // : row.tx_type, + // } + // : undefined, } satisfies RawEvent; }); @@ -1040,7 +1073,7 @@ export const createSyncStore = ({ .where("block_number", ">=", fromBlock.toString()) .execute(); await tx - .deleteFrom("callTraces") + .deleteFrom("traces") .where("chainId", "=", chainId) .where("blockNumber", ">=", fromBlock.toString()) .execute(); diff --git a/packages/core/src/sync-store/migrations.ts b/packages/core/src/sync-store/migrations.ts index 9716d3f94..9a0639496 100644 --- a/packages/core/src/sync-store/migrations.ts +++ b/packages/core/src/sync-store/migrations.ts @@ -1,4 +1,5 @@ import type { Common } from "@/common/common.js"; +import { EVENT_TYPES } from "@/utils/checkpoint.js"; import type { Kysely, Migration, MigrationProvider } from "kysely"; import { sql } from "kysely"; @@ -1082,6 +1083,146 @@ AND ponder_sync."rpcRequestResults"."blockNumber" <= 9223372036854775807; .execute(); }, }, + "2024_11_12_0_debug": { + async up(db) { + await db.schema.dropTable("callTraces").ifExists().cascade().execute(); + + await db + .deleteFrom("intervals") + .where("fragment_id", "like", "trace_%") + .execute(); + + await db.schema + .createTable("traces") + .addColumn("id", "text", (col) => col.notNull().primaryKey()) + .addColumn("chainId", "integer", (col) => col.notNull()) + .addColumn("checkpoint", "varchar(75)", (col) => col.notNull()) + .addColumn("type", "text", (col) => col.notNull()) + .addColumn("transactionHash", "varchar(66)", (col) => col.notNull()) + .addColumn("blockNumber", "numeric(78, 0)", (col) => col.notNull()) + .addColumn("blockHash", "varchar(66)", (col) => col.notNull()) + .addColumn("from", "varchar(42)", (col) => col.notNull()) + .addColumn("to", "varchar(42)") + .addColumn("gas", "numeric(78, 0)", (col) => col.notNull()) + .addColumn("gasUsed", "numeric(78, 0)", (col) => col.notNull()) + .addColumn("input", "text", (col) => col.notNull()) + .addColumn("functionSelector", "text", (col) => col.notNull()) + .addColumn("output", "text") + .addColumn("error", "text") + .addColumn("revertReason", "text") + .addColumn("value", "numeric(78, 0)") + .addColumn("index", "integer", (col) => col.notNull()) + .addColumn("subcalls", "integer", (col) => col.notNull()) + .addColumn("isReverted", "integer", (col) => col.notNull()) + .execute(); + + // `getEvents` benefits from an index on + // "blockNumber", "functionSelector", "blockHash" + // "transactionHash", "checkpoint", "chainId", "from", "to", + // "value", "type", and "isReverted" + + await db.schema + .createIndex("trace_block_number_index") + .on("traces") + .column("blockNumber") + .execute(); + + await db.schema + .createIndex("trace_function_selector_index") + .on("traces") + .column("functionSelector") + .execute(); + + await db.schema + .createIndex("trace_is_reverted_index") + .on("traces") + .column("isReverted") + .execute(); + + await db.schema + .createIndex("trace_block_hash_index") + .on("traces") + .column("blockHash") + .execute(); + + await db.schema + .createIndex("trace_transaction_hash_index") + .on("traces") + .column("transactionHash") + .execute(); + + await db.schema + .createIndex("trace_checkpoint_index") + .on("traces") + .column("checkpoint") + .execute(); + + await db.schema + .createIndex("trace_chain_id_index") + .on("traces") + .column("chainId") + .execute(); + + await db.schema + .createIndex("trace_value_index") + .on("traces") + .column("value") + .execute(); + + await db.schema + .createIndex("trace_from_index") + .on("traces") + .column("from") + .execute(); + + await db.schema 
+ .createIndex("trace_to_index") + .on("traces") + .column("to") + .execute(); + + await db.schema + .createIndex("trace_type_index") + .on("traces") + .column("type") + .execute(); + + // add `checkpoint` to `transactions` + await db.schema + .alterTable("transactions") + .addColumn("checkpoint", "varchar(75)") + .execute(); + + await db.executeQuery( + sql + .raw(` +UPDATE ponder_sync.transactions +SET checkpoint = ( + lpad(blocks.timestamp::text, 10, '0') || + lpad(transactions."chainId"::text, 16, '0') || + lpad(transactions."blockNumber"::text, 16, '0') || + lpad(transactions."transactionIndex"::text, 16, '0') || + '${EVENT_TYPES.transactions}' || + '0000000000000000' +) +FROM ponder_sync.blocks +WHERE transactions."blockHash" = blocks.hash + `) + .compile(db), + ); + + await db.schema + .alterTable("transactions") + .alterColumn("checkpoint", (col) => col.setNotNull()) + .execute(); + + await db.schema + .createIndex("transactions_checkpoint_index") + .on("transactions") + .column("checkpoint") + .execute(); + }, + }, }; class StaticMigrationProvider implements MigrationProvider { diff --git a/packages/core/src/sync/abi.ts b/packages/core/src/sync/abi.ts index e60507d09..d23f5eb12 100644 --- a/packages/core/src/sync/abi.ts +++ b/packages/core/src/sync/abi.ts @@ -8,12 +8,11 @@ import { import { type GetEventArgs, type Hex, - type LogTopic, encodeEventTopics, getAbiItem, - getEventSelector, - getFunctionSelector, parseAbiItem, + toEventSelector, + toFunctionSelector, } from "viem"; import type { Config } from "../config/config.js"; @@ -76,7 +75,7 @@ export const buildAbiEvents = ({ abi }: { abi: Abi }) => { const safeName = overloadedEventNames.has(item.name) ? signature.split("event ")[1]! : item.name; - const selector = getEventSelector(item); + const selector = toEventSelector(item); const abiEventMeta = { safeName, signature, selector, item }; @@ -92,18 +91,35 @@ export const buildAbiEvents = ({ abi }: { abi: Abi }) => { export function buildTopics( abi: Abi, filter: NonNullable, -): LogTopic[] { +): { + topic0: Hex | Hex[]; + topic1: Hex | Hex[] | null; + topic2: Hex | Hex[] | null; + topic3: Hex | Hex[] | null; +} { if (Array.isArray(filter.event)) { // List of event signatures - return [ - filter.event.map((event) => getEventSelector(findAbiEvent(abi, event))), - ]; + return { + topic0: filter.event.map((event) => + toEventSelector(findAbiEvent(abi, event)), + ), + topic1: null, + topic2: null, + topic3: null, + }; } else { // Single event with args - return encodeEventTopics({ + const topics = encodeEventTopics({ abi: [findAbiEvent(abi, filter.event)], args: filter.args as GetEventArgs, }); + + return { + topic0: topics[0], + topic1: topics[1] ?? null, + topic2: topics[2] ?? null, + topic3: topics[3] ?? null, + }; } } @@ -136,7 +152,7 @@ export const buildAbiFunctions = ({ abi }: { abi: Abi }) => { const safeName = overloadedFunctionNames.has(item.name) ? signature.split("function ")[1]! 
: `${item.name}()`; - const selector = getFunctionSelector(item); + const selector = toFunctionSelector(item); const abiEventMeta = { safeName, signature, selector, item }; diff --git a/packages/core/src/sync/events.test.ts b/packages/core/src/sync/events.test.ts index 8c17943c9..76ca25a97 100644 --- a/packages/core/src/sync/events.test.ts +++ b/packages/core/src/sync/events.test.ts @@ -1,4 +1,5 @@ import { ALICE, BOB } from "@/_test/constants.js"; +import { erc20ABI } from "@/_test/generated.js"; import { setupAnvil, setupCommon, @@ -6,42 +7,100 @@ import { setupIsolatedDatabase, } from "@/_test/setup.js"; import { - getEventsBlock, - getEventsLog, - getEventsTrace, - getRawRPCData, + createPair, + deployErc20, + deployFactory, + mintErc20, + swapPair, + transferEth, +} from "@/_test/simulate.js"; +import { + getAccountsConfigAndIndexingFunctions, + getBlocksConfigAndIndexingFunctions, + getErc20ConfigAndIndexingFunctions, + getNetwork, + getPairWithFactoryConfigAndIndexingFunctions, } from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; +import type { SyncTrace, SyncTransaction } from "@/types/sync.js"; import { encodeCheckpoint, maxCheckpoint, zeroCheckpoint, } from "@/utils/checkpoint.js"; -import { checksumAddress, parseEther, zeroAddress } from "viem"; +import { createRequestQueue } from "@/utils/requestQueue.js"; +import { + _eth_getBlockByNumber, + _eth_getLogs, + _eth_getTransactionReceipt, +} from "@/utils/rpc.js"; +import { + type Hex, + encodeEventTopics, + padHex, + parseEther, + toHex, + zeroAddress, +} from "viem"; +import { encodeFunctionData, encodeFunctionResult } from "viem/utils"; import { beforeEach, expect, test } from "vitest"; import { type BlockEvent, - type CallTraceEvent, type LogEvent, + type RawEvent, + type TraceEvent, + type TransferEvent, buildEvents, decodeEvents, } from "./events.js"; +import type { LogFactory, LogFilter } from "./source.js"; beforeEach(setupCommon); beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); test("decodeEvents() log", async (context) => { - const { common, sources } = context; + const { common } = context; + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address: zeroAddress, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + const data = padHex(toHex(parseEther("1")), { size: 32 }); - const rawEvents = await getEventsLog(sources); + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data, + topics, + }, + } as RawEvent; - const events = decodeEvents(common, sources, rawEvents) as [ - LogEvent, - LogEvent, - LogEvent, - ]; + const events = decodeEvents(common, sources, [rawEvent]) as [LogEvent]; - expect(events).toHaveLength(3); + expect(events).toHaveLength(1); expect(events[0].event.args).toMatchObject({ from: zeroAddress, to: ALICE, @@ -50,223 +109,776 @@ test("decodeEvents() log", async (context) => { expect(events[0].event.name).toBe( "Transfer(address indexed from, address indexed to, uint256 amount)", ); - expect(events[1].event.args).toMatchObject({ - from: ALICE, - to: BOB, - amount: parseEther("1"), +}); + 
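
The rewritten `decodeEvents() log` test builds the raw log by hand: the indexed Transfer args become topics via `encodeEventTopics`, and the non-indexed `amount` is a single 32-byte ABI-encoded data word. A self-contained round trip with viem, shown below, illustrates what the decoder is expected to recover; the addresses are placeholders, not the test's ALICE/BOB constants.

```ts
import {
  decodeEventLog,
  encodeEventTopics,
  padHex,
  parseAbi,
  parseEther,
  toHex,
} from "viem";

const abi = parseAbi([
  "event Transfer(address indexed from, address indexed to, uint256 amount)",
]);

// Placeholder addresses; the test itself uses zeroAddress and ALICE.
const from = "0x0000000000000000000000000000000000000000" as const;
const to = "0x00000000000000000000000000000000000000aa" as const;

// Indexed args -> topics; the non-indexed uint256 -> one 32-byte data word.
const topics = encodeEventTopics({ abi, eventName: "Transfer", args: { from, to } });
const data = padHex(toHex(parseEther("1")), { size: 32 });

const { args } = decodeEventLog({
  abi,
  topics: topics as [`0x${string}`, ...`0x${string}`[]],
  data,
});
// args = { from, to, amount: 1000000000000000000n }
```
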
+test("decodeEvents() log error", async (context) => { + const { common } = context; + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address: zeroAddress, }); - expect(events[1].event.name).toBe( - "Transfer(address indexed from, address indexed to, uint256 amount)", - ); - expect(events[2].event.args).toMatchObject({ - sender: ALICE, - to: ALICE, - amount0Out: 1n, - amount1Out: 2n, + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, }); - expect(events[2].event.name).toBe("Swap"); + + const topics = encodeEventTopics({ + abi: erc20ABI, + eventName: "Transfer", + args: { + from: zeroAddress, + to: ALICE, + }, + }); + + // invalid log.data, causing an error when decoding + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: { + id: "test", + data: "0x0" as Hex, + topics, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]) as [LogEvent]; + + expect(events).toHaveLength(0); }); -test("decodeEvents() log error", async (context) => { - const { common, sources } = context; +test("decodeEvents() block", async (context) => { + const { common } = context; - const rawEvents = await getEventsLog(sources); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); - // remove data from log, causing an error when decoding - rawEvents[0]!.log!.data = "0x0"; - const events = decodeEvents(common, sources, rawEvents) as [ - LogEvent, - LogEvent, - ]; + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: { + number: 1n, + } as RawEvent["block"], + transaction: undefined, + log: undefined, + } as RawEvent; - expect(events).toHaveLength(2); + const events = decodeEvents(common, sources, [rawEvent]) as [BlockEvent]; - expect(events[0].event.args).toMatchObject({ + expect(events).toHaveLength(1); + expect(events[0].event.block).toMatchObject({ + number: 1n, + }); +}); + +test("decodeEvents() transfer", async (context) => { + const { common } = context; + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rawEvent = { + chainId: 1, + sourceIndex: 3, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: undefined, + trace: { + id: "test", + type: "CALL", + from: ALICE, + to: BOB, + gas: 0n, + gasUsed: 0n, + input: "0x0", + output: "0x0", + value: parseEther("1"), + traceIndex: 0, + subcalls: 0, + }, + } as RawEvent; + + const events = decodeEvents(common, sources, [rawEvent]) as [TransferEvent]; + + expect(events).toHaveLength(1); + expect(events[0].event.transfer).toMatchObject({ from: ALICE, to: BOB, - amount: parseEther("1"), - }); - expect(events[1].event.args).toMatchObject({ - sender: ALICE, - to: ALICE, - amount0Out: 1n, - amount1Out: 2n, + value: parseEther("1"), }); + expect(events[0].name).toBe("Accounts:transfer:from"); }); -test("decodeEvents() block", async (context) 
=> { - const { common, sources } = context; +test("decodeEvents() transaction", async (context) => { + const { common } = context; + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); - const rawEvents = await getEventsBlock(sources); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rawEvent = { + chainId: 1, + sourceIndex: 0, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: undefined, + trace: undefined, + } as RawEvent; - const events = decodeEvents(common, sources, rawEvents) as [BlockEvent]; + const events = decodeEvents(common, sources, [rawEvent]) as [TransferEvent]; expect(events).toHaveLength(1); - expect(events[0].event.block).toMatchObject({ - number: 3n, - }); + + expect(events[0].name).toBe("Accounts:transaction:to"); }); test("decodeEvents() trace", async (context) => { - const { common, sources } = context; + const { common } = context; - const rawEvents = await getEventsTrace(sources); + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address: zeroAddress, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rawEvent = { + chainId: 1, + sourceIndex: 1, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: undefined, + trace: { + id: "test", + type: "CALL", + from: ALICE, + to: BOB, + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + gas: 0n, + gasUsed: 0n, + value: 0n, + traceIndex: 0, + subcalls: 0, + }, + } as RawEvent; - const events = decodeEvents(common, sources, rawEvents) as [CallTraceEvent]; + const events = decodeEvents(common, sources, [rawEvent]) as [TraceEvent]; expect(events).toHaveLength(1); - expect(events[0].event.args).toBeUndefined(); - expect(events[0].event.result).toBe(checksumAddress(context.factory.pair)); - expect(events[0].name).toBe("Factory.createPair()"); + expect(events[0].event.args).toStrictEqual([BOB, parseEther("1")]); + expect(events[0].event.result).toBe(true); + expect(events[0].name).toBe("Erc20.transfer()"); }); test("decodeEvents() trace error", async (context) => { - const { common, sources } = context; + const { common } = context; + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address: zeroAddress, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); - const rawEvents = await getEventsTrace(sources); + const rawEvent = { + chainId: 1, + sourceIndex: 1, + checkpoint: encodeCheckpoint(zeroCheckpoint), + block: {} as RawEvent["block"], + transaction: {} as RawEvent["transaction"], + log: undefined, + trace: { + id: "test", + type: "CALL", + from: ALICE, + to: BOB, + input: "0x", + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + gas: 0n, + gasUsed: 0n, + value: 0n, + traceIndex: 0, + subcalls: 0, + }, + } as RawEvent; - // change function selector, causing an 
error when decoding - rawEvents[0]!.trace!.input = "0x0"; - const events = decodeEvents(common, sources, rawEvents) as [CallTraceEvent]; + const events = decodeEvents(common, sources, [rawEvent]) as [TraceEvent]; expect(events).toHaveLength(0); }); -test("buildEvents() matches getEvents()", async (context) => { +test("buildEvents() matches getEvents() log", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const rpcData = await getRawRPCData(); - - await syncStore.insertBlocks({ - blocks: [ - rpcData.block1.block, - rpcData.block2.block, - rpcData.block3.block, - rpcData.block4.block, - rpcData.block5.block, - ], + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // insert block 2 + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + + const rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); await syncStore.insertLogs({ - logs: [ - { log: rpcData.block2.logs[0], block: rpcData.block2.block }, - { log: rpcData.block2.logs[1], block: rpcData.block2.block }, - { log: rpcData.block3.logs[0], block: rpcData.block3.block }, - { log: rpcData.block4.logs[0], block: rpcData.block4.block }, - ], + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], shouldUpdateCheckpoint: true, chainId: 1, }); + + const { events: events1 } = await syncStore.getEvents({ + filters: sources.map((s) => s.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: rpcLogs, + transactions: rpcBlock.transactions, + traces: [], + transactionReceipts: [], + }, + finalizedChildAddresses: new Map(), + unfinalizedChildAddresses: new Map(), + }); + + expect(events1).toHaveLength(1); + + expect(events2).toStrictEqual(events1); + + await cleanup(); +}); + +test("buildEvents() matches getEvents() log factory", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployFactory({ sender: ALICE }); + const { result: pair } = await createPair({ + factory: address, + sender: ALICE, + }); + await swapPair({ + pair, + amount0Out: 1n, + amount1Out: 1n, + to: ALICE, + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getPairWithFactoryConfigAndIndexingFunctions({ + address, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // insert block 2 + + let rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 2, + toBlock: 2, + }); + await syncStore.insertLogs({ + logs: [{ log: rpcLogs[0]! 
}], + shouldUpdateCheckpoint: false, + chainId: 1, + }); + + // insert block 3 + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 3, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + await syncStore.insertTransactions({ - transactions: [ - ...rpcData.block2.transactions, - ...rpcData.block3.transactions, - ...rpcData.block4.transactions, - ], + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], chainId: 1, }); + + rpcLogs = await _eth_getLogs(requestQueue, { + fromBlock: 3, + toBlock: 3, + }); + await syncStore.insertLogs({ + logs: [{ log: rpcLogs[0]!, block: rpcBlock }], + shouldUpdateCheckpoint: true, + chainId: 1, + }); + + const { events: events1 } = await syncStore.getEvents({ + filters: sources.map((s) => s.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + const filter = sources[0]!.filter as LogFilter; + + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: rpcLogs, + transactions: rpcBlock.transactions, + traces: [], + transactionReceipts: [], + }, + finalizedChildAddresses: new Map([[filter.address, new Set()]]), + unfinalizedChildAddresses: new Map([[filter.address, new Set([pair])]]), + }); + + expect(events1).toHaveLength(1); + + expect(events2).toStrictEqual(events1); + + await cleanup(); +}); + +test("buildEvents() matches getEvents() block", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // insert block 0 + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 0, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + const { events: events1 } = await syncStore.getEvents({ + filters: sources.map((s) => s.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: [], + transactions: [], + traces: [], + transactionReceipts: [], + }, + finalizedChildAddresses: new Map(), + unfinalizedChildAddresses: new Map(), + }); + + expect(events1).toHaveLength(1); + + expect(events2).toStrictEqual(events1); + + await cleanup(); +}); + +test("buildEvents() matches getEvents() transfer", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ 
transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + await syncStore.insertTransactionReceipts({ - transactionReceipts: [ - ...rpcData.block2.transactionReceipts, - ...rpcData.block3.transactionReceipts, - ...rpcData.block4.transactionReceipts, - ], + transactionReceipts: [rpcReceipt], chainId: 1, }); - await syncStore.insertCallTraces({ - callTraces: [ - { callTrace: rpcData.block2.callTraces[0], block: rpcData.block2.block }, - { callTrace: rpcData.block2.callTraces[1], block: rpcData.block2.block }, - { callTrace: rpcData.block3.callTraces[0], block: rpcData.block3.block }, - { callTrace: rpcData.block4.callTraces[0], block: rpcData.block4.block }, + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: BOB, + gas: "0x0", + gasUsed: "0x0", + input: "0x0", + output: "0x0", + value: rpcBlock.transactions[0]!.value, + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, + }, ], chainId: 1, }); const { events: events1 } = await syncStore.getEvents({ - filters: context.sources.map((s) => s.filter), + filters: sources.map((s) => s.filter), from: encodeCheckpoint(zeroCheckpoint), to: encodeCheckpoint(maxCheckpoint), limit: 10, }); - const events2 = [ - ...buildEvents({ - sources: context.sources, - chainId: 1, - blockWithEventData: { - ...rpcData.block1, - callTraces: [], - }, - finalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - unfinalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - }), - ...buildEvents({ - sources: context.sources, - chainId: 1, - blockWithEventData: { - ...rpcData.block2, - }, - finalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - unfinalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - }), - ...buildEvents({ - sources: context.sources, - chainId: 1, - blockWithEventData: { - ...rpcData.block3, - }, - finalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - unfinalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - }), - ...buildEvents({ - sources: context.sources, - chainId: 1, - blockWithEventData: { - ...rpcData.block4, - }, - finalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - unfinalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set([context.factory.pair])], - [context.sources[2].filter.toAddress, new Set([context.factory.pair])], - ]), - }), - ...buildEvents({ - sources: context.sources, - chainId: 1, - blockWithEventData: { - ...rpcData.block5, + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: [], + transactions: rpcBlock.transactions, + traces: [rpcTrace], + transactionReceipts: [rpcReceipt], + }, + finalizedChildAddresses: new Map(), + unfinalizedChildAddresses: 
new Map(), + }); + + // transaction:from and transfer:from + expect(events1).toHaveLength(2); + + expect(events2).toStrictEqual(events1); + + await cleanup(); +}); + +test("buildEvents() matches getEvents() transaction", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { hash } = await transferEth({ + to: BOB, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = + getAccountsConfigAndIndexingFunctions({ + address: ALICE, + }); + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 1, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + + await syncStore.insertTransactionReceipts({ + transactionReceipts: [rpcReceipt], + chainId: 1, + }); + + const { events: events1 } = await syncStore.getEvents({ + filters: sources.map((s) => s.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: [], + transactions: rpcBlock.transactions, + traces: [], + transactionReceipts: [rpcReceipt], + }, + finalizedChildAddresses: new Map(), + unfinalizedChildAddresses: new Map(), + }); + + expect(events1).toHaveLength(1); + + expect(events2).toStrictEqual(events1); + + await cleanup(); +}); + +test("buildEvents() matches getEvents() trace", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + const { hash } = await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + + const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ + address, + includeCallTraces: true, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + blockNumber: 2, + }); + await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); + + await syncStore.insertTransactions({ + transactions: [{ transaction: rpcBlock.transactions[0]!, block: rpcBlock }], + chainId: 1, + }); + + const rpcTrace = { + trace: { + type: "CALL", + from: ALICE, + to: address, + gas: "0x0", + gasUsed: "0x0", + input: encodeFunctionData({ + abi: erc20ABI, + functionName: "transfer", + args: [BOB, parseEther("1")], + }), + output: encodeFunctionResult({ + abi: erc20ABI, + functionName: "transfer", + result: true, + }), + value: "0x0", + index: 0, + subcalls: 0, + }, + transactionHash: hash, + } satisfies SyncTrace; + + await syncStore.insertTraces({ + traces: [ + { + trace: rpcTrace, + block: rpcBlock, + transaction: rpcBlock.transactions[0] as SyncTransaction, }, - finalizedChildAddresses: new Map([ - [context.sources[1].filter.address, 
new Set()], - [context.sources[2].filter.toAddress, new Set()], - ]), - unfinalizedChildAddresses: new Map([ - [context.sources[1].filter.address, new Set([context.factory.pair])], - [context.sources[2].filter.toAddress, new Set([context.factory.pair])], - ]), - }), - ]; + ], + chainId: 1, + }); + + const { events: events1 } = await syncStore.getEvents({ + filters: sources.map((s) => s.filter), + from: encodeCheckpoint(zeroCheckpoint), + to: encodeCheckpoint(maxCheckpoint), + limit: 10, + }); + + const events2 = buildEvents({ + sources, + chainId: 1, + blockWithEventData: { + block: rpcBlock, + logs: [], + transactions: rpcBlock.transactions, + traces: [rpcTrace], + transactionReceipts: [], + }, + finalizedChildAddresses: new Map(), + unfinalizedChildAddresses: new Map(), + }); + + expect(events1).toHaveLength(1); expect(events2).toStrictEqual(events1); diff --git a/packages/core/src/sync/events.ts b/packages/core/src/sync/events.ts index fd1023d60..483c6664c 100644 --- a/packages/core/src/sync/events.ts +++ b/packages/core/src/sync/events.ts @@ -1,21 +1,24 @@ import type { Common } from "@/common/common.js"; import { isBlockFilterMatched, - isCallTraceFilterMatched, isLogFilterMatched, + isTraceFilterMatched, + isTransactionFilterMatched, + isTransferFilterMatched, } from "@/sync-realtime/filter.js"; import type { BlockWithEventData } from "@/sync-realtime/index.js"; import type { Block, - CallTrace, Log, + Trace, Transaction, TransactionReceipt, + Transfer, } from "@/types/eth.js"; import type { SyncBlock, - SyncCallTrace, SyncLog, + SyncTrace, SyncTransaction, SyncTransactionReceipt, } from "@/types/sync.js"; @@ -41,7 +44,12 @@ import { hexToBigInt, hexToNumber, } from "viem"; -import { type Factory, type Source, isAddressFactory } from "./source.js"; +import { + type BlockFilter, + type Factory, + type Source, + isAddressFactory, +} from "./source.js"; export type RawEvent = { chainId: number; @@ -51,10 +59,15 @@ export type RawEvent = { block: Block; transaction?: Transaction; transactionReceipt?: TransactionReceipt; - trace?: CallTrace; + trace?: Trace; }; -export type Event = LogEvent | BlockEvent | CallTraceEvent; +export type Event = + | LogEvent + | BlockEvent + | TransactionEvent + | TransferEvent + | TraceEvent; export type SetupEvent = { type: "setup"; @@ -98,18 +111,50 @@ export type BlockEvent = { }; }; -export type CallTraceEvent = { - type: "callTrace"; +export type TransactionEvent = { + type: "transaction"; chainId: number; checkpoint: string; - /** `${source.name}.${safeName}()` */ + /** `${source.name}.{safeName}()` */ + name: string; + + event: { + block: Block; + transaction: Transaction; + transactionReceipt?: TransactionReceipt; + }; +}; + +export type TransferEvent = { + type: "transfer"; + chainId: number; + checkpoint: string; + + /** `${source.name}:transfer:from` | `${source.name}:transfer:to` */ + name: string; + + event: { + transfer: Transfer; + block: Block; + transaction: Transaction; + transactionReceipt?: TransactionReceipt; + trace: Trace; + }; +}; + +export type TraceEvent = { + type: "trace"; + chainId: number; + checkpoint: string; + + /** `${source.name}:transfer:from` | `${source.name}:transfer:to` */ name: string; event: { args: any; result: any; - trace: CallTrace; + trace: Trace; block: Block; transaction: Transaction; transactionReceipt?: TransactionReceipt; @@ -126,7 +171,7 @@ export const buildEvents = ({ logs, transactions, transactionReceipts, - callTraces, + traces, }, finalizedChildAddresses, unfinalizedChildAddresses, @@ -142,7 +187,6 
@@ export const buildEvents = ({ const transactionCache = new Map(); const transactionReceiptCache = new Map(); - const traceByTransactionHash = new Map(); for (const transaction of transactions) { transactionCache.set(transaction.hash, transaction); } @@ -152,56 +196,218 @@ export const buildEvents = ({ transactionReceipt, ); } - for (const callTrace of callTraces) { - if (traceByTransactionHash.has(callTrace.transactionHash) === false) { - traceByTransactionHash.set(callTrace.transactionHash, []); - } - traceByTransactionHash.get(callTrace.transactionHash)!.push(callTrace); - } for (let i = 0; i < sources.length; i++) { - const filter = sources[i]!.filter; + const source = sources[i]!; + const filter = source.filter; if (chainId !== filter.chainId) continue; - switch (filter.type) { - case "log": { - for (const log of logs) { - if ( - isLogFilterMatched({ filter, block, log }) && - (isAddressFactory(filter.address) - ? finalizedChildAddresses.get(filter.address)!.has(log.address) || - unfinalizedChildAddresses.get(filter.address)!.has(log.address) - : true) - ) { - events.push({ - chainId: filter.chainId, - sourceIndex: i, - checkpoint: encodeCheckpoint({ - blockTimestamp: hexToNumber(block.timestamp), - chainId: BigInt(filter.chainId), - blockNumber: hexToBigInt(log.blockNumber), - transactionIndex: hexToBigInt(log.transactionIndex), - eventType: EVENT_TYPES.logs, - eventIndex: hexToBigInt(log.logIndex), - }), - log: convertLog(log), - block: convertBlock(block), - transaction: convertTransaction( - transactionCache.get(log.transactionHash)!, - ), - transactionReceipt: filter.includeTransactionReceipts - ? convertTransactionReceipt( - transactionReceiptCache.get(log.transactionHash)!, - ) - : undefined, - trace: undefined, - }); + switch (source.type) { + case "contract": { + switch (filter.type) { + case "log": { + for (const log of logs) { + if ( + isLogFilterMatched({ filter, block, log }) && + (isAddressFactory(filter.address) + ? finalizedChildAddresses + .get(filter.address)! + .has(log.address) || + unfinalizedChildAddresses + .get(filter.address)! + .has(log.address) + : true) + ) { + events.push({ + chainId: filter.chainId, + sourceIndex: i, + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(filter.chainId), + blockNumber: hexToBigInt(log.blockNumber), + transactionIndex: hexToBigInt(log.transactionIndex), + eventType: EVENT_TYPES.logs, + eventIndex: hexToBigInt(log.logIndex), + }), + log: convertLog(log), + block: convertBlock(block), + transaction: convertTransaction( + transactionCache.get(log.transactionHash)!, + ), + transactionReceipt: undefined, + trace: undefined, + }); + } + } + break; + } + + case "trace": { + for (const trace of traces) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? [ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; + + if ( + isTraceFilterMatched({ + filter, + block, + trace: trace.trace, + fromChildAddresses, + toChildAddresses, + }) && + (filter.includeReverted + ? 
true + : trace.trace.error === undefined) + ) { + const transaction = transactionCache.get( + trace.transactionHash, + )!; + events.push({ + chainId: filter.chainId, + sourceIndex: i, + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(filter.chainId), + blockNumber: hexToBigInt(block.number), + transactionIndex: BigInt(transaction.transactionIndex), + eventType: EVENT_TYPES.traces, + eventIndex: BigInt(trace.trace.index), + }), + log: undefined, + trace: convertTrace(trace), + block: convertBlock(block), + transaction: convertTransaction(transaction), + transactionReceipt: undefined, + }); + } + } + break; + } + } + break; + } + + case "account": { + switch (filter.type) { + case "transaction": { + for (const transaction of transactions) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? [ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; + + if ( + isTransactionFilterMatched({ + filter, + block, + transaction, + fromChildAddresses, + toChildAddresses, + }) && + (filter.includeReverted + ? true + : transactionReceiptCache.get(transaction.hash)!.status === + "0x1") + ) { + events.push({ + chainId: filter.chainId, + sourceIndex: i, + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(filter.chainId), + blockNumber: hexToBigInt(block.number), + transactionIndex: BigInt(transaction.transactionIndex), + eventType: EVENT_TYPES.transactions, + eventIndex: 0n, + }), + log: undefined, + trace: undefined, + block: convertBlock(block), + transaction: convertTransaction(transaction), + transactionReceipt: undefined, + }); + } + } + break; + } + + case "transfer": { + for (const trace of traces) { + const fromChildAddresses = isAddressFactory(filter.fromAddress) + ? [ + finalizedChildAddresses.get(filter.fromAddress)!, + unfinalizedChildAddresses.get(filter.fromAddress)!, + ] + : undefined; + + const toChildAddresses = isAddressFactory(filter.toAddress) + ? [ + finalizedChildAddresses.get(filter.toAddress)!, + unfinalizedChildAddresses.get(filter.toAddress)!, + ] + : undefined; + + if ( + isTransferFilterMatched({ + filter, + block, + trace: trace.trace, + fromChildAddresses, + toChildAddresses, + }) && + (filter.includeReverted + ? 
true + : trace.trace.error === undefined) + ) { + const transaction = transactionCache.get( + trace.transactionHash, + )!; + events.push({ + chainId: filter.chainId, + sourceIndex: i, + checkpoint: encodeCheckpoint({ + blockTimestamp: hexToNumber(block.timestamp), + chainId: BigInt(filter.chainId), + blockNumber: hexToBigInt(block.number), + transactionIndex: BigInt(transaction.transactionIndex), + eventType: EVENT_TYPES.traces, + eventIndex: BigInt(trace.trace.index), + }), + log: undefined, + trace: convertTrace(trace), + block: convertBlock(block), + transaction: convertTransaction(transaction), + transactionReceipt: undefined, + }); + } + } + break; } } break; } case "block": { - if (isBlockFilterMatched({ filter, block })) { + if (isBlockFilterMatched({ filter: filter as BlockFilter, block })) { events.push({ chainId: filter.chainId, sourceIndex: i, @@ -222,62 +428,8 @@ export const buildEvents = ({ } break; } - - case "callTrace": { - for (const callTraces of Array.from(traceByTransactionHash.values())) { - // Use lexographical sort of stringified `traceAddress`. - callTraces.sort((a, b) => { - return a.traceAddress < b.traceAddress ? -1 : 1; - }); - - let eventIndex = 0n; - for (const callTrace of callTraces) { - if ( - isCallTraceFilterMatched({ filter, block, callTrace }) && - (isAddressFactory(filter.toAddress) - ? finalizedChildAddresses - .get(filter.toAddress)! - .has(callTrace.action.to) || - unfinalizedChildAddresses - .get(filter.toAddress)! - .has(callTrace.action.to) - : true) && - callTrace.result !== null && - filter.functionSelectors.includes( - callTrace.action.input.slice(0, 10).toLowerCase() as Hex, - ) - ) { - events.push({ - chainId: filter.chainId, - sourceIndex: i, - checkpoint: encodeCheckpoint({ - blockTimestamp: hexToNumber(block.timestamp), - chainId: BigInt(filter.chainId), - blockNumber: hexToBigInt(callTrace.blockNumber), - transactionIndex: BigInt(callTrace.transactionPosition), - eventType: EVENT_TYPES.callTraces, - eventIndex: eventIndex++, - }), - log: undefined, - trace: convertCallTrace(callTrace), - block: convertBlock(block), - transaction: convertTransaction( - transactionCache.get(callTrace.transactionHash)!, - ), - transactionReceipt: filter.includeTransactionReceipts - ? 
convertTransactionReceipt( - transactionReceiptCache.get(callTrace.transactionHash)!, - ) - : undefined, - }); - } - } - } - - break; - } default: - never(filter); + never(source); } } @@ -297,18 +449,6 @@ export const decodeEvents = ( const source = sources[event.sourceIndex]!; switch (source.type) { - case "block": { - events.push({ - type: "block", - chainId: event.chainId, - checkpoint: event.checkpoint, - name: `${source.name}:block`, - event: { - block: event.block, - }, - }); - break; - } case "contract": { switch (source.filter.type) { case "log": { @@ -361,7 +501,7 @@ export const decodeEvents = ( break; } - case "callTrace": { + case "trace": { try { const selector = event .trace!.input.slice(0, 10) @@ -371,7 +511,7 @@ export const decodeEvents = ( throw new Error(); } - const { safeName, item } = + const { item, safeName } = source.abiFunctions.bySelector[selector]!; const { args, functionName } = decodeFunctionData({ @@ -381,15 +521,16 @@ export const decodeEvents = ( const result = decodeFunctionResult({ abi: [item], - data: event.trace!.output, + data: event.trace!.output!, functionName, }); events.push({ - type: "callTrace", + type: "trace", chainId: event.chainId, checkpoint: event.checkpoint, + // NOTE: `safename` includes () name: `${source.name}.${safeName}`, event: { @@ -423,6 +564,70 @@ export const decodeEvents = ( break; } + case "account": { + switch (source.filter.type) { + case "transaction": { + const isFrom = source.filter.toAddress === undefined; + + events.push({ + type: "transaction", + chainId: event.chainId, + checkpoint: event.checkpoint, + + name: `${source.name}:transaction:${isFrom ? "from" : "to"}`, + + event: { + block: event.block, + transaction: event.transaction!, + transactionReceipt: event.transactionReceipt, + }, + }); + + break; + } + + case "transfer": { + const isFrom = source.filter.toAddress === undefined; + + events.push({ + type: "transfer", + chainId: event.chainId, + checkpoint: event.checkpoint, + + name: `${source.name}:transfer:${isFrom ? 
"from" : "to"}`, + + event: { + transfer: { + from: event.trace!.from, + to: event.trace!.to!, + value: event.trace!.value!, + }, + block: event.block, + transaction: event.transaction!, + transactionReceipt: event.transactionReceipt, + trace: event.trace!, + }, + }); + + break; + } + } + break; + } + + case "block": { + events.push({ + type: "block", + chainId: event.chainId, + checkpoint: event.checkpoint, + name: `${source.name}:block`, + event: { + block: event.block, + }, + }); + break; + } + default: never(source); } @@ -593,6 +798,7 @@ const convertTransaction = (transaction: SyncTransaction): Transaction => ({ }), }); +// @ts-ignore const convertTransactionReceipt = ( transactionReceipt: SyncTransactionReceipt, ): TransactionReceipt => ({ @@ -644,20 +850,16 @@ const convertTransactionReceipt = ( : transactionReceipt.type, }); -const convertCallTrace = (callTrace: SyncCallTrace): CallTrace => ({ - id: `${callTrace.transactionHash}-${JSON.stringify(callTrace.traceAddress)}`, - from: checksumAddress(callTrace.action.from), - to: checksumAddress(callTrace.action.to), - gas: hexToBigInt(callTrace.action.gas), - value: hexToBigInt(callTrace.action.value), - input: callTrace.action.input, - output: callTrace.result!.output, - gasUsed: hexToBigInt(callTrace.result!.gasUsed), - subtraces: callTrace.subtraces, - traceAddress: callTrace.traceAddress, - blockHash: callTrace.blockHash, - blockNumber: hexToBigInt(callTrace.blockNumber), - transactionHash: callTrace.transactionHash, - transactionIndex: callTrace.transactionPosition, - callType: callTrace.action.callType as CallTrace["callType"], +const convertTrace = (trace: SyncTrace): Trace => ({ + id: `${trace.transactionHash}-${trace.trace.index}`, + type: trace.trace.type, + from: checksumAddress(trace.trace.from), + to: trace.trace.to ? checksumAddress(trace.trace.to) : null, + input: trace.trace.input, + output: trace.trace.output, + gas: hexToBigInt(trace.trace.gas), + gasUsed: hexToBigInt(trace.trace.gasUsed), + value: trace.trace.value ? 
hexToBigInt(trace.trace.value) : null, + traceIndex: trace.trace.index, + subcalls: trace.trace.subcalls, }); diff --git a/packages/core/src/sync/fragments.test.ts b/packages/core/src/sync/fragments.test.ts index 35520f6b4..172dac99f 100644 --- a/packages/core/src/sync/fragments.test.ts +++ b/packages/core/src/sync/fragments.test.ts @@ -12,8 +12,11 @@ test("getLogFilterFragmentIds generates 1 log filter fragment for null filter", type: "log", chainId: 1, address: undefined, - topics: [null, null, null, null], - includeTransactionReceipts: false, + topic0: null, + topic1: null, + topic2: null, + topic3: null, + // includeTransactionReceipts: false, }); expect(logFilterFragments[0]!.id).toBe("log_1_null_null_null_null_null_0"); @@ -24,8 +27,11 @@ test("getLogFilterFragmentIds generates 1 log filter fragment for simple filter" type: "log", chainId: 1, address: "0xa", - topics: [null, null, null, null], - includeTransactionReceipts: false, + topic0: null, + topic1: null, + topic2: null, + topic3: null, + // includeTransactionReceipts: false, }); expect(logFilterFragments[0]!.id).toBe("log_1_0xa_null_null_null_null_0"); @@ -36,8 +42,11 @@ test("getLogFilterFragmentIds generates 4 log filter fragment for 2x2 filter", ( type: "log", chainId: 115511, address: ["0xa", "0xb"], - topics: [["0xc", "0xd"], null, "0xe", null], - includeTransactionReceipts: false, + topic0: ["0xc", "0xd"], + topic1: null, + topic2: "0xe", + topic3: null, + // includeTransactionReceipts: false, }); expect(logFilterFragments[0]!.id).toBe("log_115511_0xa_0xc_null_0xe_null_0"); @@ -51,20 +60,26 @@ test("getLogFilterFragmentIds generates 12 log filter fragment for 2x2x3 filter" type: "log", chainId: 1, address: ["0xa", "0xb"], - topics: [["0xc", "0xd"], null, ["0xe", "0xf", "0x1"], null], - includeTransactionReceipts: false, + topic0: ["0xc", "0xd"], + topic1: null, + topic2: ["0xe", "0xf", "0x1"], + topic3: null, + // includeTransactionReceipts: false, }); expect(logFilterFragments.length).toBe(12); }); -test("getLogFilterFragmentIds includeTransactionReceipts", () => { +test.skip("getLogFilterFragmentIds includeTransactionReceipts", () => { const logFilterFragments = getLogFilterFragmentIds({ type: "log", chainId: 1, address: undefined, - topics: [null, null, null, null], - includeTransactionReceipts: true, + topic0: null, + topic1: null, + topic2: null, + topic3: null, + // includeTransactionReceipts: true, }); expect(logFilterFragments[0]!.id).toBe("log_1_null_null_null_null_null_1"); @@ -81,9 +96,12 @@ test("getLogFilterFragmentIds builds id containing factory topic", () => { const fragments = getLogFilterFragmentIds({ type: "log", chainId: 1, - topics: [null, null, null, null], + topic0: null, + topic1: null, + topic2: null, + topic3: null, address: factory, - includeTransactionReceipts: false, + // includeTransactionReceipts: false, }); expect(fragments).toHaveLength(1); @@ -104,9 +122,12 @@ test("getLogFilterFragmentIds builds id containing factory offset", () => { const fragments = getLogFilterFragmentIds({ type: "log", chainId: 115511, - topics: [null, null, null, null], + topic0: null, + topic1: null, + topic2: null, + topic3: null, address: factory, - includeTransactionReceipts: false, + // includeTransactionReceipts: false, }); expect(fragments).toHaveLength(1); @@ -127,9 +148,12 @@ test("getLogFilterFragmentIds builds id with multiple factories", () => { const fragments = getLogFilterFragmentIds({ type: "log", chainId: 1, - topics: [null, null, null, null], + topic0: null, + topic1: null, + topic2: null, + 
topic3: null, address: factory, - includeTransactionReceipts: false, + // includeTransactionReceipts: false, }); expect(fragments).toHaveLength(2); diff --git a/packages/core/src/sync/fragments.ts b/packages/core/src/sync/fragments.ts index 248cd8339..99033ecba 100644 --- a/packages/core/src/sync/fragments.ts +++ b/packages/core/src/sync/fragments.ts @@ -1,10 +1,13 @@ +import type { Trace } from "@/utils/debug.js"; import type { Address, Hex } from "viem"; import { type BlockFilter, - type CallTraceFilter, type Factory, type Filter, type LogFilter, + type TraceFilter, + type TransactionFilter, + type TransferFilter, isAddressFactory, } from "./source.js"; @@ -15,24 +18,35 @@ type FragmentAddress = type FragmentTopic = Hex | null; export type FragmentId = + /** log_{chainId}_{address}_{topic0}_{topic1}_{topic2}_{topic3}_{includeReceipts} */ | `log_${number}_${FragmentAddress}_${FragmentTopic}_${FragmentTopic}_${FragmentTopic}_${FragmentTopic}_${0 | 1}` - | `trace_${number}_${FragmentAddress}_${Address | null}` + /** transaction_{chainId}_{fromAddress}_{toAddress}_{includeReceipts} */ + | `transaction_${number}_${FragmentAddress}_${FragmentAddress}_${0 | 1}` + /** transfer_{chainId}_{fromAddress}_{toAddress} */ + | `transfer_${number}_${FragmentAddress}_${FragmentAddress}` + /** trace_{chainId}_{fromAddress}_{toAddress}_{callType}_{functionSelector}_{includeReceipts} */ + | `trace_${number}_${FragmentAddress}_${FragmentAddress}_${Trace["result"]["type"] | null}_${Hex | null}_${0 | 1}` + /** block_{chainId}_{interval}_{offset} */ | `block_${number}_${number}_${number}`; export const getFragmentIds = ( - filter: Filter extends Filter - ? Omit - : never, + filter: Omit, ): FragmentReturnType => { - if (filter.type === "log") { - return getLogFilterFragmentIds(filter as LogFilter); - } + switch (filter.type) { + case "log": + return getLogFilterFragmentIds(filter as LogFilter); + case "block": + return getBlockFilterFragmentId(filter as BlockFilter); - if (filter.type === "callTrace") { - return getTraceFilterFragmentIds(filter as CallTraceFilter); - } + case "transaction": + return getTransactionFilterFragmentIds(filter as TransactionFilter); + + case "transfer": + return getTransferFilterFragmentIds(filter as TransferFilter); - return getBlockFilterFragmentId(filter as BlockFilter); + case "trace": + return getTraceFilterFragmentIds(filter as TraceFilter); + } }; type FragmentReturnType = { @@ -40,6 +54,34 @@ type FragmentReturnType = { adjacent: FragmentId[]; }[]; +const getAddressFragmentIds = ( + address: Address | Address[] | Factory | undefined, +) => { + const fragments: { id: FragmentAddress; adjacent: FragmentAddress[] }[] = []; + + if (isAddressFactory(address)) { + for (const fragmentAddress of Array.isArray(address.address) + ? address.address + : [address.address]) { + const id = + `${fragmentAddress}_${address.eventSelector}_${address.childAddressLocation}` as const; + + fragments.push({ id, adjacent: [id] }); + } + } else { + for (const fragmentAddress of Array.isArray(address) + ? address + : [address ?? null]) { + fragments.push({ + id: fragmentAddress, + adjacent: fragmentAddress ? [fragmentAddress, null] : [fragmentAddress], + }); + } + } + + return fragments; +}; + /** * Generates log filter fragment IDs from a log filter. 
* @@ -49,51 +91,53 @@ type FragmentReturnType = { export const getLogFilterFragmentIds = ({ chainId, address, - topics, - includeTransactionReceipts, + topic0, + topic1, + topic2, + topic3, + // includeTransactionReceipts, }: Omit): FragmentReturnType => { + // TODO(kyle) handle once column selection + const includeTransactionReceipts = false as boolean; + const fragments: FragmentReturnType = []; - const { topic0, topic1, topic2, topic3 } = parseTopics(topics); + const addressFragmentIds = getAddressFragmentIds(address); - if (isAddressFactory(address)) { - for (const fragmentAddress of Array.isArray(address.address) - ? address.address - : [address.address]) { - for (const fragmentTopic0 of Array.isArray(topic0) ? topic0 : [topic0]) { - for (const fragmentTopic1 of Array.isArray(topic1) - ? topic1 - : [topic1]) { - for (const fragmentTopic2 of Array.isArray(topic2) - ? topic2 - : [topic2]) { - for (const fragmentTopic3 of Array.isArray(topic3) - ? topic3 - : [topic3]) { - const id = - `log_${chainId}_${fragmentAddress}_${address.eventSelector}_${address.childAddressLocation}_${fragmentTopic0}_${fragmentTopic1}_${fragmentTopic2}_${fragmentTopic3}_${ - includeTransactionReceipts ? 1 : 0 - }` as const; - - const adjacent: FragmentId[] = []; - - for (const adjacentTopic0 of fragmentTopic0 === null - ? [fragmentTopic0] - : [fragmentTopic0, null]) { - for (const adjacentTopic1 of fragmentTopic1 === null - ? [fragmentTopic1] - : [fragmentTopic1, null]) { - for (const adjacentTopic2 of fragmentTopic2 === null - ? [fragmentTopic2] - : [fragmentTopic3, null]) { - for (const adjacentTopic3 of fragmentTopic3 === null - ? [fragmentTopic3] - : [fragmentTopic3, null]) { + for (const fragmentAddress of addressFragmentIds) { + for (const fragmentTopic0 of Array.isArray(topic0) ? topic0 : [topic0]) { + for (const fragmentTopic1 of Array.isArray(topic1) ? topic1 : [topic1]) { + for (const fragmentTopic2 of Array.isArray(topic2) + ? topic2 + : [topic2]) { + for (const fragmentTopic3 of Array.isArray(topic3) + ? topic3 + : [topic3]) { + const id = + `log_${chainId}_${fragmentAddress.id}_${fragmentTopic0 ?? null}_${fragmentTopic1 ?? null}_${fragmentTopic2 ?? null}_${fragmentTopic3 ?? null}_${ + includeTransactionReceipts ? 1 : 0 + }` as const; + + const adjacent: FragmentId[] = []; + + for (const adjacentAddress of fragmentAddress.adjacent) { + for (const adjacentTopic0 of fragmentTopic0 + ? [fragmentTopic0, null] + : [null]) { + for (const adjacentTopic1 of fragmentTopic1 + ? [fragmentTopic1, null] + : [null]) { + for (const adjacentTopic2 of fragmentTopic2 + ? [fragmentTopic2, null] + : [null]) { + for (const adjacentTopic3 of fragmentTopic3 + ? [fragmentTopic3, null] + : [null]) { for (const adjacentTxr of includeTransactionReceipts === true ? [1] : [0, 1]) { adjacent.push( - `log_${chainId}_${fragmentAddress}_${address.eventSelector}_${address.childAddressLocation}_${adjacentTopic0}_${adjacentTopic1}_${adjacentTopic2}_${adjacentTopic3}_${ + `log_${chainId}_${adjacentAddress}_${adjacentTopic0}_${adjacentTopic1}_${adjacentTopic2}_${adjacentTopic3}_${ adjacentTxr as 0 | 1 }`, ); @@ -102,67 +146,9 @@ export const getLogFilterFragmentIds = ({ } } } - - fragments.push({ id, adjacent }); } - } - } - } - } - } else { - for (const fragmentAddress of Array.isArray(address) - ? address - : [address ?? null]) { - for (const fragmentTopic0 of Array.isArray(topic0) ? topic0 : [topic0]) { - for (const fragmentTopic1 of Array.isArray(topic1) - ? 
topic1 - : [topic1]) { - for (const fragmentTopic2 of Array.isArray(topic2) - ? topic2 - : [topic2]) { - for (const fragmentTopic3 of Array.isArray(topic3) - ? topic3 - : [topic3]) { - const id = - `log_${chainId}_${fragmentAddress}_${fragmentTopic0}_${fragmentTopic1}_${fragmentTopic2}_${fragmentTopic3}_${ - includeTransactionReceipts ? 1 : 0 - }` as const; - - const adjacent: FragmentId[] = []; - - for (const adjacentAddress of fragmentAddress === null - ? [fragmentAddress] - : [fragmentAddress, null]) { - for (const adjacentTopic0 of fragmentTopic0 === null - ? [fragmentTopic0] - : [fragmentTopic0, null]) { - for (const adjacentTopic1 of fragmentTopic1 === null - ? [fragmentTopic1] - : [fragmentTopic1, null]) { - for (const adjacentTopic2 of fragmentTopic2 === null - ? [fragmentTopic2] - : [fragmentTopic3, null]) { - for (const adjacentTopic3 of fragmentTopic3 === null - ? [fragmentTopic3] - : [fragmentTopic3, null]) { - for (const adjacentTxr of includeTransactionReceipts === - true - ? [1] - : [0, 1]) { - adjacent.push( - `log_${chainId}_${adjacentAddress}_${adjacentTopic0}_${adjacentTopic1}_${adjacentTopic2}_${adjacentTopic3}_${ - adjacentTxr as 0 | 1 - }`, - ); - } - } - } - } - } - } - fragments.push({ id, adjacent }); - } + fragments.push({ id, adjacent }); } } } @@ -172,20 +158,6 @@ export const getLogFilterFragmentIds = ({ return fragments; }; -function parseTopics(topics: (Hex | Hex[] | null)[] | undefined) { - return { - topic0: topics?.[0] ?? null, - topic1: topics?.[1] ?? null, - topic2: topics?.[2] ?? null, - topic3: topics?.[3] ?? null, - } as { - topic0: Hex | Hex[] | null; - topic1: Hex | Hex[] | null; - topic2: Hex | Hex[] | null; - topic3: Hex | Hex[] | null; - }; -} - export const getBlockFilterFragmentId = ({ chainId, interval, @@ -199,59 +171,104 @@ export const getBlockFilterFragmentId = ({ ]; }; -export const getTraceFilterFragmentIds = ({ +export const getTransactionFilterFragmentIds = ({ chainId, fromAddress, toAddress, -}: Omit & { +}: Omit & { chainId: number; }): FragmentReturnType => { const fragments: FragmentReturnType = []; + const fromAddressFragmentIds = getAddressFragmentIds(fromAddress); + const toAddressFragmentIds = getAddressFragmentIds(toAddress); - if (isAddressFactory(toAddress)) { - for (const fragmentFromAddress of fromAddress === undefined - ? [null] - : fromAddress) { - for (const fragmentToAddress of Array.isArray(toAddress.address) - ? toAddress.address - : [toAddress.address]) { - const id = - `trace_${chainId}_${fragmentToAddress}_${toAddress.eventSelector}_${toAddress.childAddressLocation}_${fragmentFromAddress}` as const; + for (const fragmentFromAddress of fromAddressFragmentIds) { + for (const fragmentToAddress of toAddressFragmentIds) { + const id = + `transaction_${chainId}_${fragmentFromAddress.id}_${fragmentToAddress.id}_${0}` as const; - const adjacent: FragmentId[] = []; + const adjacent: FragmentId[] = []; - for (const adjacentFromAddress of fragmentFromAddress === null - ? 
[fragmentFromAddress] - : [fragmentFromAddress, null]) { + for (const adjacentFromAddress of fragmentFromAddress.adjacent) { + for (const adjacentToAddress of fragmentToAddress.adjacent) { adjacent.push( - `trace_${chainId}_${fragmentToAddress}_${toAddress.eventSelector}_${toAddress.childAddressLocation}_${adjacentFromAddress}`, + `transaction_${chainId}_${adjacentFromAddress}_${adjacentToAddress}_${0}`, ); } + } - fragments.push({ id, adjacent }); + fragments.push({ id, adjacent }); + } + } + + return fragments; +}; + +export const getTransferFilterFragmentIds = ({ + chainId, + fromAddress, + toAddress, +}: Omit & { + chainId: number; +}): FragmentReturnType => { + const fragments: FragmentReturnType = []; + const fromAddressFragmentIds = getAddressFragmentIds(fromAddress); + const toAddressFragmentIds = getAddressFragmentIds(toAddress); + + for (const fragmentFromAddress of fromAddressFragmentIds) { + for (const fragmentToAddress of toAddressFragmentIds) { + const id = + `transfer_${chainId}_${fragmentFromAddress.id}_${fragmentToAddress.id}` as const; + + const adjacent: FragmentId[] = []; + + for (const adjacentFromAddress of fragmentFromAddress.adjacent) { + for (const adjacentToAddress of fragmentToAddress.adjacent) { + adjacent.push( + `transfer_${chainId}_${adjacentFromAddress}_${adjacentToAddress}`, + ); + } } + + fragments.push({ id, adjacent }); } - } else { - for (const fragmentFromAddress of fromAddress === undefined - ? [null] - : fromAddress) { - for (const fragmentToAddress of toAddress === undefined - ? [null] - : (toAddress as Address[])) { + } + + return fragments; +}; + +export const getTraceFilterFragmentIds = ({ + chainId, + fromAddress, + toAddress, + callType, + functionSelector, +}: Omit & { + chainId: number; +}): FragmentReturnType => { + const fragments: FragmentReturnType = []; + const fromAddressFragmentIds = getAddressFragmentIds(fromAddress); + const toAddressFragmentIds = getAddressFragmentIds(toAddress); + + for (const fragmentFromAddress of fromAddressFragmentIds) { + for (const fragmentToAddress of toAddressFragmentIds) { + for (const fragmentFunctionSelector of Array.isArray(functionSelector) + ? functionSelector + : [functionSelector]) { const id = - `trace_${chainId}_${fragmentFromAddress}_${fragmentToAddress}` as const; + `trace_${chainId}_${fragmentFromAddress.id}_${fragmentToAddress.id}_${callType ?? null}_${fragmentFunctionSelector ?? null}_${0}` as const; const adjacent: FragmentId[] = []; - for (const adjacentFromAddress of fragmentFromAddress === null - ? [fragmentFromAddress] - : [fragmentFromAddress, null]) { - for (const adjacentToAddress of fragmentToAddress === null - ? [fragmentToAddress] - : [fragmentToAddress, null]) { - adjacent.push( - `trace_${chainId}_${adjacentFromAddress}_${adjacentToAddress}`, - ); + for (const adjacentFromAddress of fragmentFromAddress.adjacent) { + for (const adjacentToAddress of fragmentToAddress.adjacent) { + for (const adjacentFunctionSelector of fragmentFunctionSelector + ? [fragmentFunctionSelector, null] + : [null]) { + adjacent.push( + `trace_${chainId}_${adjacentFromAddress}_${adjacentToAddress}_${callType ?? 
null}_${adjacentFunctionSelector}_${0}`, + ); + } } } diff --git a/packages/core/src/sync/index.test.ts b/packages/core/src/sync/index.test.ts index 35672e75f..0fecead83 100644 --- a/packages/core/src/sync/index.test.ts +++ b/packages/core/src/sync/index.test.ts @@ -4,7 +4,12 @@ import { setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { testClient } from "@/_test/utils.js"; +import { + getBlocksConfigAndIndexingFunctions, + getNetwork, + testClient, +} from "@/_test/utils.js"; +import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import { decodeCheckpoint, encodeCheckpoint, @@ -13,10 +18,9 @@ import { } from "@/utils/checkpoint.js"; import { wait } from "@/utils/wait.js"; import { promiseWithResolvers } from "@ponder/common"; -import { type TestContext, beforeEach, expect, test, vi } from "vitest"; +import { beforeEach, expect, test, vi } from "vitest"; import type { RawEvent } from "./events.js"; import { type Sync, createSync } from "./index.js"; -import type { BlockSource } from "./source.js"; beforeEach(setupCommon); beforeEach(setupAnvil); @@ -34,33 +38,28 @@ async function drainAsyncGenerator( return result; } -function getMultichainNetworksAndSources(context: TestContext) { - const mainnet = context.networks[0]; - const optimism = { ...mainnet, name: "optimism", chainId: 10 }; - - const sources = [ - context.sources[4], - { - ...context.sources[4], - networkName: optimism.name, - filter: { - ...context.sources[4].filter, - chainId: 10, - }, - }, - ] as [BlockSource, BlockSource]; - - return { networks: [mainnet, optimism], sources }; -} - test("createSync()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const sync = await createSync({ syncStore, - sources: [context.sources[0]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -76,11 +75,30 @@ test("createSync()", async (context) => { test("getEvents() returns events", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 1 }); + + // finalized block: 1 + network.finalityBlockCount = 0; + const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -89,7 +107,7 @@ test("getEvents() returns events", async (context) => { const events = await drainAsyncGenerator(sync.getEvents()); expect(events).toBeDefined(); - expect(events).toHaveLength(1); + expect(events).toHaveLength(2); await sync.kill(); @@ -99,11 +117,30 @@ test("getEvents() returns events", async (context) => { test("getEvents() with cache", async (context) => { 
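  // A second sync over the same block range should be served from the sync-store cache and return the same events as the first run.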
const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 1 }); + + // finalized block: 1 + network.finalityBlockCount = 0; + let sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -115,9 +152,9 @@ test("getEvents() with cache", async (context) => { sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -128,7 +165,7 @@ test("getEvents() with cache", async (context) => { expect(spy).toHaveBeenCalledTimes(0); expect(events).toBeDefined(); - expect(events).toHaveLength(1); + expect(events).toHaveLength(2); await sync.kill(); @@ -138,14 +175,32 @@ test("getEvents() with cache", async (context) => { test("getEvents() end block", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - context.networks[0].finalityBlockCount = 1; - context.sources[4].filter.toBlock = 4; + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 + network.finalityBlockCount = 0; + + sources[0]!.filter.toBlock = 1; const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -171,15 +226,46 @@ test("getEvents() end block", async (context) => { // multiple blocks with the same hash and different chain IDs. 
test.skip("getEvents() multichain", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const { networks, sources } = getMultichainNetworksAndSources(context); - sources[1].filter.toBlock = 1; + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + + const { sources: sources1, networks: networks1 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const { sources: sources2, networks: networks2 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 + networks1[0]!.finalityBlockCount = 0; + networks2[0]!.finalityBlockCount = 0; + + sources2[0]!.filter.chainId = 2; + sources2[0]!.filter.toBlock = 1; + networks2[0]!.chainId = 2; const sync = await createSync({ syncStore, - sources: [sources[0], sources[1]], + sources: [...sources1, ...sources2], common: context.common, - networks, + networks: [...networks1, ...networks2], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -198,11 +284,30 @@ test.skip("getEvents() multichain", async (context) => { test("getEvents() updates status", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 + network.finalityBlockCount = 0; + const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -212,8 +317,8 @@ test("getEvents() updates status", async (context) => { const status = sync.getStatus(); - expect(status[context.networks[0].name]?.ready).toBe(false); - expect(status[context.networks[0].name]?.block?.number).toBe(1); + expect(status[network.name]?.ready).toBe(false); + expect(status[network.name]?.block?.number).toBe(2); await sync.kill(); @@ -223,17 +328,30 @@ test("getEvents() updates status", async (context) => { test("getEvents() pagination", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = context.networks[0]; + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 network.finalityBlockCount = 0; - context.common.options = { - ...context.common.options, - syncEventsQuerySize: 1, - }; + context.common.options.syncEventsQuerySize = 1; const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, networks: [network], onRealtimeEvent: async () => {}, @@ -252,11 +370,30 @@ test("getEvents() pagination", async (context) => { test("getEvents() initialCheckpoint", async 
(context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 + network.finalityBlockCount = 0; + const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(maxCheckpoint), @@ -275,21 +412,43 @@ test("getEvents() initialCheckpoint", async (context) => { test("getEvents() refetches finalized block", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + + // finalized block: 2 + network.finalityBlockCount = 0; + context.common.options.syncHandoffStaleSeconds = 0.5; const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: encodeCheckpoint(maxCheckpoint), }); + // cause `latestFinalizedFetch` to be updated + const gen = sync.getEvents(); + await wait(1000); - await drainAsyncGenerator(sync.getEvents()); + await drainAsyncGenerator(gen); await sync.kill(); @@ -299,11 +458,27 @@ test("getEvents() refetches finalized block", async (context) => { test("startRealtime()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + await testClient.mine({ blocks: 2 }); + const sync = await createSync({ syncStore, - sources: [context.sources[4]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), @@ -315,8 +490,8 @@ test("startRealtime()", async (context) => { const status = sync.getStatus(); - expect(status[context.networks[0].name]?.ready).toBe(true); - expect(status[context.networks[0].name]?.block?.number).toBe(1); + expect(status[network.name]?.ready).toBe(true); + expect(status[network.name]?.block?.number).toBe(1); await sync.kill(); @@ -326,14 +501,30 @@ test("startRealtime()", async (context) => { test("onEvent() handles block", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: 
"", + rootDir: "", + }, + }); + const promise = promiseWithResolvers(); const events: RawEvent[] = []; + await testClient.mine({ blocks: 1 }); + const sync = await createSync({ syncStore, - sources: [context.sources[0]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async (event) => { if (event.type === "block") { events.push(...event.events); @@ -350,7 +541,7 @@ test("onEvent() handles block", async (context) => { await promise.promise; - expect(events).toHaveLength(2); + expect(events).toHaveLength(1); await sync.kill(); @@ -360,14 +551,32 @@ test("onEvent() handles block", async (context) => { test("onEvent() handles finalize", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const promise = promiseWithResolvers(); let checkpoint: string; + // finalized block: 0 + + network.finalityBlockCount = 2; + const sync = await createSync({ syncStore, - sources: [context.sources[0]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async (event) => { if (event.type === "finalize") { checkpoint = event.checkpoint; @@ -386,7 +595,7 @@ test("onEvent() handles finalize", async (context) => { await promise.promise; - expect(decodeCheckpoint(checkpoint!).blockNumber).toBe(5n); + expect(decodeCheckpoint(checkpoint!).blockNumber).toBe(2n); await sync.kill(); @@ -395,30 +604,55 @@ test("onEvent() handles finalize", async (context) => { test.todo("onEvent() handles reorg"); -test("onEvent() multichain end block", async (context) => { +test("onEvent() multichain gets all events", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const { networks, sources } = getMultichainNetworksAndSources(context); - sources[1].filter.toBlock = 1; + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources: sources1, networks: networks1 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const { sources: sources2, networks: networks2 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // finalized block: 0 + + sources2[0]!.filter.chainId = 2; + networks2[0]!.chainId = 2; const promise = promiseWithResolvers(); const sync = await createSync({ syncStore, - sources: [sources[0], sources[1]], + sources: [...sources1, ...sources2], common: context.common, - networks, + networks: [...networks1, ...networks2], onRealtimeEvent: async (event) => { if (event.type === "block") { - if (event.events.length > 0) { - promise.resolve(); - } + promise.resolve(); } }, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); + await testClient.mine({ blocks: 1 }); + await drainAsyncGenerator(sync.getEvents()); await sync.startRealtime(); @@ -429,28 +663,57 @@ test("onEvent() multichain end block", async (context) => { await cleanup(); }); -test("onEvent() multichain gets all events", async (context) => { + +test("onEvent() multichain end block", async (context) => { const { cleanup, syncStore } = await 
setupDatabaseServices(context); - const { networks, sources } = getMultichainNetworksAndSources(context); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources: sources1, networks: networks1 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + const { sources: sources2, networks: networks2 } = + await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + + // finalized block: 0 + + sources2[0]!.filter.chainId = 2; + sources2[0]!.filter.toBlock = 0; + networks2[0]!.chainId = 2; const promise = promiseWithResolvers(); const sync = await createSync({ syncStore, - sources: [sources[0], sources[1]], + sources: [...sources1, ...sources2], common: context.common, - networks, + networks: [...networks1, ...networks2], onRealtimeEvent: async (event) => { if (event.type === "block") { - if (event.events.length > 0) { - promise.resolve(); - } + promise.resolve(); } }, onFatalError: () => {}, initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); + await testClient.mine({ blocks: 1 }); + await drainAsyncGenerator(sync.getEvents()); await sync.startRealtime(); @@ -465,15 +728,35 @@ test("onEvent() multichain gets all events", async (context) => { test("onEvent() handles endBlock finalization", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const promise = promiseWithResolvers(); - context.sources[0].filter.toBlock = 4; + // finalized block: 0 + + await testClient.mine({ blocks: 2 }); + + network.finalityBlockCount = 2; + + sources[0]!.filter.toBlock = 1; const sync = await createSync({ syncStore, - sources: [context.sources[0]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async (event) => { if (event.type === "finalize") { promise.resolve(); @@ -483,7 +766,7 @@ test("onEvent() handles endBlock finalization", async (context) => { initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - await testClient.mine({ blocks: 4 }); + await testClient.mine({ blocks: 2 }); await drainAsyncGenerator(sync.getEvents()); @@ -499,13 +782,29 @@ test("onEvent() handles endBlock finalization", async (context) => { test("onEvent() handles errors", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const network = getNetwork(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + options: { + ponderDir: "", + rootDir: "", + }, + }); + const promise = promiseWithResolvers(); + // finalized block: 0 + const sync = await createSync({ syncStore, - sources: [context.sources[0]], + sources, common: context.common, - networks: context.networks, + networks: [network], onRealtimeEvent: async () => {}, onFatalError: () => { promise.resolve(); diff --git a/packages/core/src/sync/index.ts b/packages/core/src/sync/index.ts index 4a0467a7a..38a22df7c 100644 --- a/packages/core/src/sync/index.ts +++ b/packages/core/src/sync/index.ts @@ 
-635,7 +635,7 @@ export const createSync = async (args: CreateSyncParameters): Promise => { filters: event.filters, logs: event.logs, factoryLogs: event.factoryLogs, - callTraces: event.callTraces, + traces: event.traces, transactions: event.transactions, transactionReceipts: event.transactionReceipts, }; @@ -750,7 +750,11 @@ export const createSync = async (args: CreateSyncParameters): Promise => { }), args.syncStore.insertTransactions({ transactions: finalizedEventData.flatMap( - ({ transactions }) => transactions, + ({ transactions, block }) => + transactions.map((transaction) => ({ + transaction, + block, + })), ), chainId: network.chainId, }), @@ -760,9 +764,16 @@ export const createSync = async (args: CreateSyncParameters): Promise => { ), chainId: network.chainId, }), - args.syncStore.insertCallTraces({ - callTraces: finalizedEventData.flatMap(({ callTraces, block }) => - callTraces.map((callTrace) => ({ callTrace, block })), + args.syncStore.insertTraces({ + traces: finalizedEventData.flatMap( + ({ traces, block, transactions }) => + traces.map((trace) => ({ + trace, + block, + transaction: transactions.find( + (t) => t.hash === trace.transactionHash, + )!, + })), ), chainId: network.chainId, }), @@ -979,7 +990,7 @@ export const getCachedBlock = ({ }): Promise | undefined => { const latestCompletedBlocks = sources.map(({ filter }) => { const requiredInterval = [ - filter.fromBlock, + filter.fromBlock ?? 0, filter.toBlock ?? Number.POSITIVE_INFINITY, ] satisfies Interval; const cachedIntervals = historicalSync.intervalsCache.get(filter)!; @@ -991,7 +1002,9 @@ export const getCachedBlock = ({ if (completedIntervals.length === 0) return undefined; const earliestCompletedInterval = completedIntervals[0]!; - if (earliestCompletedInterval[0] !== filter.fromBlock) return undefined; + if (earliestCompletedInterval[0] !== (filter.fromBlock ?? 0)) { + return undefined; + } return earliestCompletedInterval[1]; }); @@ -1008,7 +1021,8 @@ export const getCachedBlock = ({ if ( latestCompletedBlocks.every( (block, i) => - block !== undefined || sources[i]!.filter.fromBlock > minCompletedBlock, + block !== undefined || + (sources[i]!.filter.fromBlock ?? 0) > minCompletedBlock, ) ) { return _eth_getBlockByNumber(requestQueue, { @@ -1077,7 +1091,7 @@ export async function* localHistoricalSyncGenerator({ intervalDifference( [ [ - filter.fromBlock, + filter.fromBlock ?? 0, Math.min( filter.toBlock ?? Number.POSITIVE_INFINITY, totalInterval[1], diff --git a/packages/core/src/sync/source.ts b/packages/core/src/sync/source.ts index 185b9b3e6..aefdb1d5e 100644 --- a/packages/core/src/sync/source.ts +++ b/packages/core/src/sync/source.ts @@ -1,17 +1,38 @@ import type { AbiEvents, AbiFunctions } from "@/sync/abi.js"; import type { SyncLog } from "@/types/sync.js"; +import type { Trace } from "@/utils/debug.js"; import type { Abi, Address, Hex, LogTopic } from "viem"; -export type Source = ContractSource | BlockSource; +export type Source = ContractSource | AccountSource | BlockSource; export type ContractSource< filter extends "log" | "trace" = "log" | "trace", factory extends Factory | undefined = Factory | undefined, + fromFactory extends Factory | undefined = Factory | undefined, + toFactory extends Factory | undefined = Factory | undefined, > = { - filter: filter extends "log" ? LogFilter : CallTraceFilter; + filter: filter extends "log" + ? 
LogFilter + : TraceFilter; } & ContractMetadata; + +export type AccountSource< + filter extends "transaction" | "transfer" = "transaction" | "transfer", + fromFactory extends Factory | undefined = Factory | undefined, + toFactory extends Factory | undefined = Factory | undefined, +> = { + filter: filter extends "transaction" + ? TransactionFilter + : TransferFilter; +} & AccountMetadata; + export type BlockSource = { filter: BlockFilter } & BlockMetadata; -export type Filter = LogFilter | BlockFilter | CallTraceFilter; +export type Filter = + | LogFilter + | BlockFilter + | TransferFilter + | TransactionFilter + | TraceFilter; export type Factory = LogFactory; export type ContractMetadata = { @@ -22,6 +43,11 @@ name: string; networkName: string; }; +export type AccountMetadata = { + type: "account"; + name: string; + networkName: string; +}; export type BlockMetadata = { type: "block"; name: string; @@ -34,9 +60,11 @@ type: "log"; chainId: number; address: factory extends Factory ? factory : Address | Address[] | undefined; - topics: LogTopic[]; - includeTransactionReceipts: boolean; - fromBlock: number; + topic0: LogTopic | undefined; + topic1: LogTopic | undefined; + topic2: LogTopic | undefined; + topic3: LogTopic | undefined; + fromBlock: number | undefined; toBlock: number | undefined; }; @@ -45,20 +73,60 @@ export type BlockFilter = { chainId: number; interval: number; offset: number; - fromBlock: number; + fromBlock: number | undefined; toBlock: number | undefined; }; -export type CallTraceFilter< - factory extends Factory | undefined = Factory | undefined, +export type TransferFilter< + fromFactory extends Factory | undefined = Factory | undefined, + toFactory extends Factory | undefined = Factory | undefined, +> = { + type: "transfer"; + chainId: number; + fromAddress: fromFactory extends Factory + ? fromFactory + : Address | Address[] | undefined; + toAddress: toFactory extends Factory + ? toFactory + : Address | Address[] | undefined; + includeReverted: boolean; + fromBlock: number | undefined; + toBlock: number | undefined; +}; + +export type TransactionFilter< + fromFactory extends Factory | undefined = Factory | undefined, + toFactory extends Factory | undefined = Factory | undefined, +> = { + type: "transaction"; + chainId: number; + fromAddress: fromFactory extends Factory + ? fromFactory + : Address | Address[] | undefined; + toAddress: toFactory extends Factory + ? toFactory + : Address | Address[] | undefined; + includeReverted: boolean; + fromBlock: number | undefined; + toBlock: number | undefined; +}; + +export type TraceFilter< + fromFactory extends Factory | undefined = Factory | undefined, + toFactory extends Factory | undefined = Factory | undefined, > = { - type: "callTrace"; + type: "trace"; chainId: number; - fromAddress: Address[] | undefined; - toAddress: factory extends Factory ? factory : Address[] | undefined; - functionSelectors: Hex[]; - includeTransactionReceipts: boolean; - fromBlock: number; + fromAddress: fromFactory extends Factory + ? fromFactory + : Address | Address[] | undefined; + toAddress: toFactory extends Factory + ?
toFactory + : Address | Address[] | undefined; + functionSelector: Hex | Hex[] | undefined; + callType: Trace["result"]["type"] | undefined; + includeReverted: boolean; + fromBlock: number | undefined; toBlock: number | undefined; }; diff --git a/packages/core/src/sync/transport.test.ts b/packages/core/src/sync/transport.test.ts index d76a3a4fe..aa7a7eb53 100644 --- a/packages/core/src/sync/transport.test.ts +++ b/packages/core/src/sync/transport.test.ts @@ -1,11 +1,14 @@ +import { ALICE } from "@/_test/constants.js"; import { setupAnvil, setupCommon, setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; -import { anvil, publicClient } from "@/_test/utils.js"; -import type { Transport } from "viem"; +import { deployErc20, mintErc20 } from "@/_test/simulate.js"; +import { anvil, getNetwork, publicClient } from "@/_test/utils.js"; +import { createRequestQueue } from "@/utils/requestQueue.js"; +import { type Transport, parseEther } from "viem"; import { toHex } from "viem"; import { assertType, beforeEach, expect, test, vi } from "vitest"; import { cachedTransport } from "./transport.js"; @@ -15,11 +18,16 @@ beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); test("default", async (context) => { - const { requestQueues } = context; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + const { syncStore, cleanup } = await setupDatabaseServices(context); const transport = cachedTransport({ - requestQueue: requestQueues[0], + requestQueue, syncStore, }); @@ -45,12 +53,17 @@ test("default", async (context) => { }); test("request() block dependent method", async (context) => { - const { requestQueues } = context; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + const { syncStore, cleanup } = await setupDatabaseServices(context); const blockNumber = await publicClient.getBlockNumber(); const transport = cachedTransport({ - requestQueue: requestQueues[0], + requestQueue, syncStore, })({ chain: anvil, @@ -80,12 +93,26 @@ test("request() block dependent method", async (context) => { }); test("request() non-block dependent method", async (context) => { - const { requestQueues } = context; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + + const { address } = await deployErc20({ sender: ALICE }); + await mintErc20({ + erc20: address, + to: ALICE, + amount: parseEther("1"), + sender: ALICE, + }); + const { syncStore, cleanup } = await setupDatabaseServices(context); - const block = await publicClient.getBlock({ blockNumber: 2n }); + const blockNumber = await publicClient.getBlockNumber(); + const block = await publicClient.getBlock({ blockNumber: blockNumber }); const transport = cachedTransport({ - requestQueue: requestQueues[0], + requestQueue, syncStore, })({ chain: anvil, @@ -115,10 +142,15 @@ test("request() non-block dependent method", async (context) => { }); test("request() non-cached method", async (context) => { - const { requestQueues } = context; + const network = getNetwork(); + const requestQueue = createRequestQueue({ + network, + common: context.common, + }); + const { syncStore, cleanup } = await setupDatabaseServices(context); const transport = cachedTransport({ - requestQueue: requestQueues[0], + requestQueue, syncStore, })({ chain: anvil, diff --git a/packages/core/src/types/eth.ts b/packages/core/src/types/eth.ts index e64f09323..4b1d288f3 100644 --- 
a/packages/core/src/types/eth.ts +++ b/packages/core/src/types/eth.ts @@ -159,9 +159,7 @@ export type Log = { transactionIndex: number; }; -/** - * A confirmed Ethereum transaction receipt. - */ +/** A confirmed Ethereum transaction receipt. */ export type TransactionReceipt = { /** Hash of block containing this transaction */ blockHash: Hash; @@ -193,41 +191,48 @@ export type TransactionReceipt = { type: TransactionType; }; -type _TraceAddress = number | _TraceAddress[]; -type TraceAddress = _TraceAddress[]; - -/** - * An Ethereum call trace. - */ -export type CallTrace = { - /** Globally unique identifier for this trace (`${transactionHash}-${traceAddress}`) */ +export type Trace = { + /** Globally unique identifier for this trace (`${transactionHash}-${traceIndex}`) */ id: string; - /** Message sender */ + /** The type of the call. */ + type: + | "CALL" + | "CALLCODE" + | "DELEGATECALL" + | "STATICCALL" + | "CREATE" + | "CREATE2" + | "SELFDESTRUCT"; + /** The address that initiated the call. */ from: Address; - /** Message receipient */ - to: Address; - /** Amount of gas allocated to this call */ + /** The address of the contract that was called. */ + to: Address | null; + /** How much gas was left before the call. */ gas: bigint; - /** Value in wei sent with this call */ - value: bigint; - /** Calldata sent with this call */ - input: Hex; - /** Contains return data */ - output: Hex; - /** Total used gas by this trace */ + /** How much gas was used by the call. */ gasUsed: bigint; - /** Number of traces created by this trace */ - subtraces: number; - /** Description of this traces position within all traces in the transaction */ - traceAddress: TraceAddress; - /** Hash of block containing this trace */ - blockHash: Hash; - /** Number of block containing this trace */ - blockNumber: bigint; - /** Hash of the transaction that created this trace */ - transactionHash: Hash; - /** Index of the transaction that created this trace */ - transactionIndex: number; - /** EVM opcode used to make this call */ - callType: "call" | "staticcall" | "delegatecall" | "callcode"; + /** Calldata input. */ + input: Hex; + /** Output of the call, if any. */ + output?: Hex; + /** Error message, if any. */ + error?: string; + /** Why this call reverted, if it reverted. */ + revertReason?: string; + /** Value transferred. */ + value: bigint | null; + /** Index of this trace in the transaction. */ + traceIndex: number; + /** Number of subcalls. */ + subcalls: number; +}; + +/** A native token transfer. 
*/ +export type Transfer = { + /** The address that sent the transfer */ + from: Address; + /** The address that received the transfer */ + to: Address; + /** The amount of tokens transferred */ + value: bigint; }; diff --git a/packages/core/src/types/sync.ts b/packages/core/src/types/sync.ts index 608cf5183..a190a136f 100644 --- a/packages/core/src/types/sync.ts +++ b/packages/core/src/types/sync.ts @@ -1,5 +1,5 @@ +import type { Trace } from "@/utils/debug.js"; import type { - Address, BlockTag, Hex, Log, @@ -12,87 +12,12 @@ export type SyncBlock = RpcBlock, true>; export type SyncLog = Log; export type SyncTransaction = RpcTransaction; export type SyncTransactionReceipt = RpcTransactionReceipt; -export type SyncTrace = - | SyncCallTrace - | SyncCreateTrace - | SyncRewardTrace - | SyncSuicideTrace; +export type SyncTrace = { + trace: Trace["result"] & { index: number; subcalls: number }; + transactionHash: Trace["txHash"]; +}; export type LightBlock = Pick< SyncBlock, "hash" | "parentHash" | "number" | "timestamp" >; - -export type SyncCallTrace = { - action: { - callType: "call" | "delegatecall" | "staticcall"; - from: Address; - gas: Hex; - input: Hex; - to: Address; - value: Hex; - }; - blockHash: Hex; - blockNumber: Hex; - error?: string; - result: { - gasUsed: Hex; - output: Hex; - } | null; - subtraces: number; - traceAddress: number[]; - transactionHash: Hex; - transactionPosition: number; - type: "call"; -}; - -export type SyncCreateTrace = { - action: { - from: Address; - gas: Hex; - init: Hex; - value: Hex; - }; - blockHash: Hex; - blockNumber: Hex; - result: { - address: Address; - code: Hex; - gasUsed: Hex; - } | null; - subtraces: number; - traceAddress: number[]; - transactionHash: Hex; - transactionPosition: number; - type: "create"; -}; - -export type SyncSuicideTrace = { - action: { - address: Address; - refundAddress: Address; - balance: Hex; - }; - blockHash: Hex; - blockNumber: Hex; - result: null; - subtraces: number; - traceAddress: number[]; - transactionHash: Hex; - transactionPosition: number; - type: "suicide"; -}; - -export type SyncRewardTrace = { - action: { - author: Address; - rewardType: "block" | "uncle"; - value: Hex; - }; - blockHash: Hex; - blockNumber: Hex; - result: null; - subtraces: number; - traceAddress: number[]; - type: "reward"; -}; diff --git a/packages/core/src/types/virtual.test-d.ts b/packages/core/src/types/virtual.test-d.ts index 160d8befc..983ea242b 100644 --- a/packages/core/src/types/virtual.test-d.ts +++ b/packages/core/src/types/virtual.test-d.ts @@ -5,8 +5,8 @@ import { assertType, test } from "vitest"; import type { Db } from "./db.js"; import type { Block, - CallTrace, Log, + Trace, Transaction, TransactionReceipt, } from "./eth.js"; @@ -72,6 +72,12 @@ const config = createConfig({ }, }, }, + accounts: { + a1: { + address: "0x", + network: "mainnet", + }, + }, blocks: { b1: { interval: 2, @@ -94,6 +100,7 @@ test("FormatEventNames without filter", () => { { contract: { abi: abi; network: "" }; }, + {}, {} >; @@ -113,6 +120,7 @@ test("FormatEvent names with filter", () => { { contract: { abi: abi; network: ""; filter: { event: "Event1()" } }; }, + {}, {} >; @@ -132,6 +140,7 @@ test("FormatEvent names with filter array", () => { filter: { event: readonly ["Event1()"] }; }; }, + {}, {} >; @@ -147,6 +156,7 @@ test("FormatEventNames with semi-weak abi", () => { { contract: { abi: abi[number][]; network: "" }; }, + {}, {} >; @@ -166,6 +176,7 @@ test("FormatEventNames with weak abi", () => { { contract: { abi: Abi; network: "" }; }, + 
{}, {} >; @@ -179,6 +190,7 @@ test("FormatEventNames with functions", () => { { contract: { abi: abi; network: ""; includeCallTraces: true }; }, + {}, {} >; @@ -195,10 +207,34 @@ test("FormatEventNames with functions", () => { assertType({} as any as a); }); +test("FormatEventName with accounts", () => { + type a = Virtual.FormatEventNames< + // ^? + {}, + { account: { address: "0x"; network: "mainnet" } }, + {} + >; + + assertType( + {} as any as + | "account:transfer:from" + | "account:transfer:to" + | "account:transaction:from" + | "account:transaction:to", + ); + assertType< + | "account:transfer:from" + | "account:transfer:to" + | "account:transaction:from" + | "account:transaction:to" + >({} as any as a); +}); + test("FormatEventName with blocks", () => { type a = Virtual.FormatEventNames< // ^? {}, + {}, { block: { interval: 2; startBlock: 1; network: "mainnet" } } >; @@ -381,7 +417,7 @@ test("Event with functions", () => { type expectedEvent = { args: readonly [Address]; result: bigint; - trace: CallTrace; + trace: Trace; block: Block; transaction: Transaction; }; @@ -397,9 +433,41 @@ test("Event with functions and no inputs or outputs", () => { type expectedEvent = { args: never; result: never; - trace: CallTrace; + trace: Trace; + block: Block; + transaction: Transaction; + }; + + assertType({} as any as expectedEvent); + assertType({} as any as a); +}); + +test("Event with account transaction", () => { + type a = Virtual.Event; + // ^? + + type expectedEvent = { + block: Block; + transaction: Transaction; + }; + + assertType({} as any as expectedEvent); + assertType({} as any as a); +}); + +test("Event with account transfer", () => { + type a = Virtual.Event; + // ^? + + type expectedEvent = { + transfer: { + from: Address; + to: Address; + value: bigint; + }; block: Block; transaction: Transaction; + trace: Trace; }; assertType({} as any as expectedEvent); diff --git a/packages/core/src/types/virtual.ts b/packages/core/src/types/virtual.ts index f3096b307..393addab4 100644 --- a/packages/core/src/types/virtual.ts +++ b/packages/core/src/types/virtual.ts @@ -10,10 +10,11 @@ import type { Drizzle, Schema } from "@/drizzle/index.js"; import type { ReadOnlyClient } from "@/indexing/ponderActions.js"; import type { Block, - CallTrace, Log, + Trace, Transaction, TransactionReceipt, + Transfer, } from "@/types/eth.js"; import type { ApiRegistry } from "./api.js"; import type { Db } from "./db.js"; @@ -44,14 +45,18 @@ export namespace Virtual { safeFunctionNames = SafeFunctionNames, > = string extends safeFunctionNames ? never : safeFunctionNames; - /** "{ContractName}:{EventName}" | "{ContractName}.{FunctionName}()" | "{SourceName}:block" . */ + /** "{ContractName}:{EventName}" | "{ContractName}.{FunctionName}()" | "{SourceName}:block" | "{SourceName}:transaction:from" . 
*/ export type FormatEventNames< contracts extends Config["contracts"], + accounts extends Config["accounts"], blocks extends Config["blocks"], > = | { [name in keyof contracts]: `${name & string}:${_FormatEventNames | Setup}`; }[keyof contracts] + | { + [name in keyof accounts]: `${name & string}:${"transaction" | "transfer"}:${"from" | "to"}`; + }[keyof accounts] | { [name in keyof blocks]: `${name & string}:block`; }[keyof blocks] @@ -97,6 +102,7 @@ export namespace Virtual { export type EventNames = FormatEventNames< config["contracts"], + config["accounts"], config["blocks"] >; @@ -107,37 +113,56 @@ export namespace Virtual { contractName extends ExtractSourceName = ExtractSourceName, eventName extends ExtractEventName = ExtractEventName, > = name extends `${string}:block` - ? { block: Prettify } - : name extends `${string}.${string}` - ? Prettify< + ? // 1. block event + { block: Prettify } + : name extends `${string}:transaction:${"from" | "to"}` + ? // 2. transaction event + { + block: Prettify; + // TODO(kyle) annotate with `status` + transaction: Prettify; + } + : name extends `${string}:transfer:${"from" | "to"}` + ? // 3. transfer event { - args: FormatFunctionArgs< - config["contracts"][contractName]["abi"], - eventName - >; - result: FormatFunctionResult< - config["contracts"][contractName]["abi"], - eventName - >; - trace: Prettify; + transfer: Prettify; block: Prettify; transaction: Prettify; - } & FormatTransactionReceipts - > - : eventName extends Setup - ? never - : Prettify< - { - name: eventName; - args: FormatEventArgs< - config["contracts"][contractName]["abi"], - eventName + trace: Prettify; + } + : name extends `${string}.${string}` + ? // 4. call trace event + Prettify< + { + args: FormatFunctionArgs< + config["contracts"][contractName]["abi"], + eventName + >; + result: FormatFunctionResult< + config["contracts"][contractName]["abi"], + eventName + >; + trace: Prettify; + block: Prettify; + transaction: Prettify; + } & FormatTransactionReceipts + > + : eventName extends Setup + ? // 5. setup event + never + : // 6. log event + Prettify< + { + name: eventName; + args: FormatEventArgs< + config["contracts"][contractName]["abi"], + eventName + >; + log: Prettify; + block: Prettify; + transaction: Prettify; + } & FormatTransactionReceipts >; - log: Prettify; - block: Prettify; - transaction: Prettify; - } & FormatTransactionReceipts - >; type ContextContractProperty = Exclude< keyof Config["contracts"][string], diff --git a/packages/core/src/utils/checkpoint.ts b/packages/core/src/utils/checkpoint.ts index 4f99900ab..3040e092e 100644 --- a/packages/core/src/utils/checkpoint.ts +++ b/packages/core/src/utils/checkpoint.ts @@ -30,9 +30,10 @@ const CHECKPOINT_LENGTH = EVENT_INDEX_DIGITS; export const EVENT_TYPES = { + transactions: 2, blocks: 5, logs: 5, - callTraces: 7, + traces: 7, } as const; export const encodeCheckpoint = (checkpoint: Checkpoint) => { diff --git a/packages/core/src/utils/debug.ts b/packages/core/src/utils/debug.ts new file mode 100644 index 000000000..f0a57977c --- /dev/null +++ b/packages/core/src/utils/debug.ts @@ -0,0 +1,110 @@ +import type { Address, Hash, Hex, LogTopic } from "viem"; + +/** @see https://github.com/alloy-rs/alloy/blob/main/crates/rpc-types-trace/src/geth/call.rs */ +/** @see https://github.com/alloy-rs/alloy/blob/main/crates/rpc-types-trace/src/common.rs */ +/** @see https://github.com/paradigmxyz/reth/blob/main/crates/rpc/rpc/src/debug.rs */ + +/** Result type for geth style transaction trace. 
*/ +export type Trace = { + /** Transaction hash. */ + txHash: Hex; + /** Trace result produced by the tracer. */ + result: CallFrame; +}; + +/** + * A call frame produced by `debug_traceBlockByNumber` and `debug_traceBlockByHash` + * with `"tracer": "callTracer"`. + */ +type CallFrame = { + /** The type of the call. */ + type: + | "CALL" + | "CALLCODE" + | "DELEGATECALL" + | "STATICCALL" + | "CREATE" + | "CREATE2" + | "SELFDESTRUCT"; + /** The address that initiated the call. */ + from: Address; + /** The address of the contract that was called. */ + to?: Address; + /** How much gas was left before the call. */ + gas: Hex; + /** How much gas was used by the call. */ + gasUsed: Hex; + /** Calldata input. */ + input: Hex; + /** Output of the call, if any. */ + output?: Hex; + /** Error message, if any. */ + error?: string; + /** Why this call reverted, if it reverted. */ + revertReason?: string; + /** Recorded child calls. */ + calls?: CallFrame[]; + /** Logs emitted by this call. */ + logs?: CallLogFrame[]; + /** Value transferred. */ + value?: Hex; +}; + +/** Represents a recorded log that is emitted during a trace call. */ +type CallLogFrame = { + /** The address of the contract that emitted the log. */ + address: Address; + /** The topics of the log. */ + topics: LogTopic[]; + /** The data of the log. */ + data: Hex; + /** The position of the log relative to subcalls within the same trace. */ + position: number; +}; + +/** The configuration for the call tracer. */ +type CallConfig = { + /** When set to true, this will only trace the primary (top-level) call and not any sub-calls. */ + onlyTopCall?: boolean; + /** When set to true, this will include the logs emitted by the call. */ + withLog?: boolean; +}; + +export type DebugRpcSchema = [ + /** + * @description Returns tracing results by executing all transactions in the block specified by the block hash + * + * @example + * provider.request({ method: 'debug_traceBlockByHash', params: ['0x...', { tracer: "callTracer" }] }) + * // => [{ + * // txHash: '0x5a42...', + * // result: { ... }, + * // }] + */ + { + Method: "debug_traceBlockByHash"; + Parameters: [ + hash: Hash, + tracingOptions: { tracer: "callTracer"; tracerConfig?: CallConfig }, + ]; + ReturnType: Trace[]; + }, + /** + * @description Returns tracing results by executing all transactions in the block specified by the block number + * + * @example + * provider.request({ method: 'debug_traceBlockByNumber', params: ['0x1b4', { tracer: "callTracer" }] }) + * // => [{ + * // txHash: '0x5a42...', + * // result: { ... }, + * // }] + */ + { + Method: "debug_traceBlockByNumber"; + Parameters: [ + block: Hex, + tracingOptions: { tracer: "callTracer"; tracerConfig?: CallConfig }, + ]; + ReturnType: Trace[]; + }, +]; diff --git a/packages/core/src/utils/interval.ts b/packages/core/src/utils/interval.ts index 6c409007f..62508a507 100644 --- a/packages/core/src/utils/interval.ts +++ b/packages/core/src/utils/interval.ts @@ -1,3 +1,5 @@ +import { range } from "./range.js"; + export type Interval = [number, number]; /** @@ -191,3 +193,7 @@ export function getChunks({ return _chunks; } + +export function intervalRange(interval: Interval) { + return range(interval[0], interval[1] + 1); +} diff --git a/packages/core/src/utils/requestQueue.test.ts b/packages/core/src/utils/requestQueue.test.ts index 71f92eb50..2c29071ef 100644 --- a/packages/core/src/utils/requestQueue.test.ts +++ b/packages/core/src/utils/requestQueue.test.ts @@ -1,4 +1,5 @@ import { setupAnvil, setupCommon } from "@/_test/setup.js"; 
+import { getNetwork } from "@/_test/utils.js"; import type { Common } from "@/common/common.js"; import type { Network } from "@/config/networks.js"; import { beforeEach, expect, test } from "vitest"; @@ -15,8 +16,10 @@ const getQueue = (network: Network, common: Common) => { }); }; -test("requests", async ({ networks, common }) => { - const queue = getQueue(networks[0], common); +test("requests", async ({ common }) => { + const network = getNetwork(); + + const queue = getQueue(network, common); queue.start(); const chainId = await queue.request({ method: "eth_chainId" }); diff --git a/packages/core/src/utils/requestQueue.ts b/packages/core/src/utils/requestQueue.ts index 7d89750c4..72643a344 100644 --- a/packages/core/src/utils/requestQueue.ts +++ b/packages/core/src/utils/requestQueue.ts @@ -16,21 +16,23 @@ import { type RpcError, isHex, } from "viem"; +import type { DebugRpcSchema } from "./debug.js"; import { startClock } from "./timer.js"; import { wait } from "./wait.js"; -type RequestReturnType< - method extends EIP1193Parameters["method"], -> = Extract["ReturnType"]; +type Schema = [...PublicRpcSchema, ...DebugRpcSchema]; + +type RequestReturnType["method"]> = + Extract["ReturnType"]; export type RequestQueue = Omit< Queue< - RequestReturnType["method"]>, - EIP1193Parameters + RequestReturnType["method"]>, + EIP1193Parameters >, "add" > & { - request: >( + request: >( parameters: TParameters, ) => Promise>; }; diff --git a/packages/core/src/utils/rpc.ts b/packages/core/src/utils/rpc.ts index 57bf575c2..6150d30a7 100644 --- a/packages/core/src/utils/rpc.ts +++ b/packages/core/src/utils/rpc.ts @@ -147,59 +147,144 @@ export const _eth_getTransactionReceipt = ( }); /** - * Helper function for "trace_filter" request. - * - * Note: No strict typing is available. + * Helper function for "debug_traceBlockByNumber" request. */ -export const _trace_filter = ( +export const _debug_traceBlockByNumber = ( requestQueue: RequestQueue, - params: { - fromBlock: Hex | number; - toBlock: Hex | number; - fromAddress?: Address[]; - toAddress?: Address[]; + { + blockNumber, + }: { + blockNumber: Hex | number; }, ): Promise => requestQueue .request({ - method: "trace_filter", + method: "debug_traceBlockByNumber", params: [ - { - fromBlock: - typeof params.fromBlock === "number" - ? numberToHex(params.fromBlock) - : params.fromBlock, - toBlock: - typeof params.toBlock === "number" - ? numberToHex(params.toBlock) - : params.toBlock, - fromAddress: params.fromAddress - ? params.fromAddress.map((a) => toLowerCase(a)) - : undefined, - toAddress: params.toAddress - ? params.toAddress.map((a) => toLowerCase(a)) - : undefined, - }, + typeof blockNumber === "number" + ? 
numberToHex(blockNumber) + : blockNumber, + { tracer: "callTracer" }, ], - } as any) - .then((traces) => traces as unknown as SyncTrace[]); + }) + .then((traces) => { + const result: SyncTrace[] = []; + let index = 0; + // traces that have an error, or whose parent has an error, + // mapped to the error info so it can be propagated to their subcalls + const failedTraces = new Map< + (typeof traces)[number]["result"], + { error?: string; revertReason?: string } + >(); + + const dfs = ( + frames: (typeof traces)[number]["result"][], + transactionHash: Hex, + parentFrame: (typeof traces)[number]["result"] | undefined, + ) => { + for (const frame of frames) { + if (frame.error !== undefined) { + failedTraces.set(frame, { + error: frame.error, + revertReason: frame.revertReason, + }); + } else if (parentFrame && failedTraces.has(parentFrame)) { + const error = failedTraces.get(parentFrame)!; + + frame.error = error.error; + frame.revertReason = error.revertReason; + + failedTraces.set(frame, error); + } + + // @ts-ignore + frame.index = index; + // @ts-ignore + frame.subcalls = frame.calls?.length ?? 0; + + result.push({ trace: frame as SyncTrace["trace"], transactionHash }); + + index++; + + if (frame.calls) { + dfs(frame.calls, transactionHash, frame); + } + } + }; + + for (const trace of traces) { + index = 0; + dfs([trace.result], trace.txHash, undefined); + } + + return result; + }); /** - * Helper function for "trace_block" request. + * Helper function for "debug_traceBlockByHash" request. */ -export const _trace_block = ( +export const _debug_traceBlockByHash = ( requestQueue: RequestQueue, - params: { - blockNumber: Hex | number; + { + hash, + }: { + hash: Hash, }, ): Promise => requestQueue .request({ - method: "trace_block", - params: [ - typeof params.blockNumber === "number" - ? numberToHex(params.blockNumber) - : params.blockNumber, - ], - } as any) - .then((traces) => traces as unknown as SyncTrace[]); + method: "debug_traceBlockByHash", + params: [hash, { tracer: "callTracer" }], + }) + .then((traces) => { + const result: SyncTrace[] = []; + let index = 0; + // traces that have an error, or whose parent has an error, + // mapped to the error info so it can be propagated to their subcalls + const failedTraces = new Map< + (typeof traces)[number]["result"], + { error?: string; revertReason?: string } + >(); + + const dfs = ( + frames: (typeof traces)[number]["result"][], + transactionHash: Hex, + parentFrame: (typeof traces)[number]["result"] | undefined, + ) => { + for (const frame of frames) { + if (frame.error !== undefined) { + failedTraces.set(frame, { + error: frame.error, + revertReason: frame.revertReason, + }); + } else if (parentFrame && failedTraces.has(parentFrame)) { + const error = failedTraces.get(parentFrame)!; + + frame.error = error.error; + frame.revertReason = error.revertReason; + + failedTraces.set(frame, error); + } + + // @ts-ignore + frame.index = index; + // @ts-ignore + frame.subcalls = frame.calls?.length ?? 
0; + + result.push({ trace: frame as SyncTrace["trace"], transactionHash }); + + index++; + + if (frame.calls) { + dfs(frame.calls, transactionHash, frame); + } + } + }; + + for (const trace of traces) { + index = 0; + dfs([trace.result], trace.txHash, undefined); + } + + return result; + }); diff --git a/packages/create-ponder/src/index.ts b/packages/create-ponder/src/index.ts index 3e4474bb7..e75e5908d 100644 --- a/packages/create-ponder/src/index.ts +++ b/packages/create-ponder/src/index.ts @@ -79,6 +79,11 @@ const templates = [ title: "Feature - Factory contract", description: "A Ponder app using a factory contract", }, + { + id: "feature-accounts", + title: "Feature - Accounts", + description: "A Ponder app using accounts", + }, { id: "feature-filter", title: "Feature - Custom event filter", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 87356f4e2..0f49bb281 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -136,6 +136,31 @@ importers: specifier: ^5.2.2 version: 5.3.3 + examples/feature-accounts: + dependencies: + '@ponder/core': + specifier: workspace:* + version: link:../../packages/core + hono: + specifier: ^4.5.0 + version: 4.5.0 + viem: + specifier: ^2.21.3 + version: 2.21.3(typescript@5.3.3)(zod@3.23.8) + devDependencies: + '@types/node': + specifier: ^20.10.0 + version: 20.11.24 + eslint: + specifier: ^8.54.0 + version: 8.56.0 + eslint-config-ponder: + specifier: workspace:* + version: link:../../packages/eslint-config-ponder + typescript: + specifier: ^5.3.2 + version: 5.3.3 + examples/feature-api-functions: dependencies: '@ponder/core':