From cb989913a4b897e13b86d1da12722483b6f37b48 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 17 Oct 2024 22:49:12 +0200 Subject: [PATCH 01/15] test with receipts encoding --- Cargo.lock | 921 +++++++++++++++--------- mp2-common/src/eth.rs | 386 +++++++++- mp2-v1/src/block_extraction/circuit.rs | 2 +- mp2-v1/src/block_extraction/mod.rs | 6 +- mp2-v1/tests/common/block_extraction.rs | 2 +- 5 files changed, 953 insertions(+), 364 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc46475fa..e2e45aa77 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -74,9 +74,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy" @@ -101,11 +101,11 @@ dependencies = [ [[package]] name = "alloy-chains" -version = "0.1.38" +version = "0.1.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "156bfc5dcd52ef9a5f33381701fa03310317e14c65093a9430d3e3557b08dcd3" +checksum = "18c5c520273946ecf715c0010b4e3503d7eba9893cd9ce6b7fff5654c4a3c470" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "num_enum", "strum", ] @@ -117,7 +117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae09ffd7c29062431dd86061deefe4e3c6f07fa0d674930095f8dcedb0baf02c" dependencies = [ "alloy-eips", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "alloy-serde", "auto_impl", @@ -136,37 +136,37 @@ dependencies = [ "alloy-json-abi", "alloy-network", "alloy-network-primitives", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-provider", "alloy-rpc-types-eth", "alloy-sol-types", "alloy-transport", "futures", "futures-util", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] name = "alloy-core" -version = 
"0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8316d83e590f4163b221b8180008f302bda5cf5451202855cdd323e588849c" +checksum = "c3d14d531c99995de71558e8e2206c27d709559ee8e5a0452b965ea82405a013" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "alloy-sol-types", ] [[package]] name = "alloy-dyn-abi" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2364c782a245cf8725ea6dbfca5f530162702b5d685992ea03ce64529136cc" +checksum = "80759b3f57b3b20fa7cd8fef6479930fc95461b58ff8adea6e87e618449c8a1d" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-sol-type-parser", "alloy-sol-types", "const-hex", @@ -182,18 +182,18 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "serde", ] [[package]] name = "alloy-eip7702" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6cee6a35793f3db8a5ffe60e86c695f321d081a567211245f503e8c498fce8" +checksum = "4c986539255fb839d1533c128e190e557e52ff652c9ef62939e233a81dd93f7e" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "derive_more 1.0.0", "serde", @@ -207,7 +207,7 @@ checksum = "5b6aa3961694b30ba53d41006131a2fca3bdab22e4c344e46db2c639e7c2dfdd" dependencies = [ "alloy-eip2930", "alloy-eip7702", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "alloy-serde", "c-kzg", @@ -223,18 +223,18 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e53f7877ded3921d18a0a9556d55bedf84535567198c9edab2aa23106da91855" dependencies = [ - 
"alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-serde", "serde", ] [[package]] name = "alloy-json-abi" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84c506bf264110fa7e90d9924f742f40ef53c6572ea56a0b0bd714a567ed389" +checksum = "ac4b22b3e51cac09fd2adfcc73b55f447b4df669f983c13f7894ec82b607c63f" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-sol-type-parser", "serde", "serde_json", @@ -246,11 +246,11 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3694b7e480728c0b3e228384f223937f14c10caef5a4c766021190fc8f283d35" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-sol-types", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", ] @@ -264,7 +264,7 @@ dependencies = [ "alloy-eips", "alloy-json-rpc", "alloy-network-primitives", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rpc-types-eth", "alloy-serde", "alloy-signer", @@ -274,7 +274,7 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -285,7 +285,7 @@ checksum = "df9f3e281005943944d15ee8491534a1c7b3cbf7a7de26f8c433b842b93eb5f9" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-serde", "serde", ] @@ -297,12 +297,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9805d126f24be459b958973c0569c73e1aadd27d4535eee82b2b6764aa03616" dependencies = [ "alloy-genesis", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "k256", "rand", "serde_json", "tempfile", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", "url", ] @@ -326,9 +326,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9fce5dbd6a4f118eecc4719eaa9c7ffc31c315e6c5ccde3642db927802312425" +checksum = "9db948902dfbae96a73c2fbf1f7abec62af034ab883e4c777c3fd29702bd6e2c" dependencies = [ "alloy-rlp", "bytes", @@ -337,9 +337,9 @@ dependencies = [ "derive_more 1.0.0", "foldhash", "getrandom 0.2.15", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "hex-literal", - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "k256", "keccak-asm", @@ -367,7 +367,7 @@ dependencies = [ "alloy-network", "alloy-network-primitives", "alloy-node-bindings", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rpc-client", "alloy-rpc-types-anvil", "alloy-rpc-types-eth", @@ -384,11 +384,11 @@ dependencies = [ "lru", "parking_lot", "pin-project", - "reqwest 0.12.8", + "reqwest 0.12.9", "schnellru", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tracing", "url", @@ -414,7 +414,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -424,12 +424,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "374dbe0dc3abdc2c964f36b3d3edf9cdb3db29d16bda34aa123f03d810bec1dd" dependencies = [ "alloy-json-rpc", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-transport", "alloy-transport-http", "futures", "pin-project", - "reqwest 0.12.8", + "reqwest 0.12.9", "serde", "serde_json", "tokio", @@ -446,7 +446,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c74832aa474b670309c20fffc2a869fa141edab7c79ff7963fad0a08de60bae1" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -458,7 +458,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca97963132f78ddfc60e43a017348e6d52eea983925c23652f5b330e8e02291" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 
0.8.14", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -473,7 +473,7 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-network-primitives", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-rlp", "alloy-serde", "alloy-sol-types", @@ -489,7 +489,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dfa4a7ccf15b2492bb68088692481fd6b2604ccbee1d0d6c44c21427ae4df83" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "serde", "serde_json", ] @@ -500,12 +500,12 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e10aec39d60dc27edcac447302c7803d2371946fb737245320a05b78eb2fafd" dependencies = [ - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "async-trait", "auto_impl", "elliptic-curve", "k256", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -516,52 +516,52 @@ checksum = "d8396f6dff60700bc1d215ee03d86ff56de268af96e2bf833a14d0bafcab9882" dependencies = [ "alloy-consensus", "alloy-network", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-signer", "async-trait", "k256", "rand", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] name = "alloy-sol-macro" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9343289b4a7461ed8bab8618504c995c049c082b70c7332efd7b32125633dc05" +checksum = "3bfd7853b65a2b4f49629ec975fee274faf6dff15ab8894c620943398ef283c0" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4222d70bec485ceccc5d8fd4f2909edd65b5d5e43d4aca0b5dcee65d519ae98f" +checksum = "82ec42f342d9a9261699f8078e57a7a4fda8aaa73c1a212ed3987080e6a9cd13" dependencies = [ 
"alloy-json-abi", "alloy-sol-macro-input", "const-hex", "heck 0.5.0", - "indexmap 2.6.0", + "indexmap 2.7.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e17f2677369571b976e51ea1430eb41c3690d344fef567b840bfc0b01b6f83a" +checksum = "ed2c50e6a62ee2b4f7ab3c6d0366e5770a21cad426e109c2f40335a1b3aff3df" dependencies = [ "alloy-json-abi", "const-hex", @@ -570,15 +570,15 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa64d80ae58ffaafdff9d5d84f58d03775f66c84433916dc9a64ed16af5755da" +checksum = "ac17c6e89a50fb4a758012e4b409d9a0ba575228e69b539fe37d7a1bd507ca4a" dependencies = [ "serde", "winnow", @@ -586,12 +586,12 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6520d427d4a8eb7aa803d852d7a52ceb0c519e784c292f64bb339e636918cf27" +checksum = "c9dc0fffe397aa17628160e16b89f704098bf3c9d74d5d369ebc239575936de5" dependencies = [ "alloy-json-abi", - "alloy-primitives 0.8.12", + "alloy-primitives 0.8.14", "alloy-sol-macro", "const-hex", "serde", @@ -609,7 +609,7 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tower", "tracing", @@ -625,7 +625,7 @@ checksum = "5dc013132e34eeadaa0add7e74164c1503988bfba8bae885b32e0918ba85a8a6" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest 0.12.8", + "reqwest 0.12.9", "serde_json", "tower", "tracing", @@ -668,9 +668,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.15" +version = 
"0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -683,43 +683,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "ark-ff" @@ 
-891,7 +891,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -902,7 +902,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -935,7 +935,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -991,9 +991,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bb8" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10cf871f3ff2ce56432fddc2615ac7acc3aa22ca321f8fea800846fbb32f188" +checksum = "d89aabfae550a5c44b43ab941844ffcd2e993cb6900b342debf59e9ea74acdb8" dependencies = [ "async-trait", "futures-util", @@ -1124,9 +1124,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -1184,9 +1184,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -1202,14 +1202,14 @@ dependencies = [ "semver 1.0.23", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] name = "cc" -version = "1.1.30" +version = "1.2.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" +checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" dependencies = [ "jobserver", "libc", @@ -1247,9 +1247,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" dependencies = [ "clap_builder", "clap_derive", @@ -1257,9 +1257,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" dependencies = [ "anstream", "anstyle", @@ -1276,14 +1276,14 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" [[package]] name = "coins-bip32" @@ -1298,7 +1298,7 @@ dependencies = [ "k256", "serde", "sha2", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -1314,7 +1314,7 @@ dependencies = [ "pbkdf2 0.12.2", "rand", "sha2", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -1334,14 +1334,14 @@ dependencies = [ "serde_derive", "sha2", "sha3", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" @@ -1368,9 +1368,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" +checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" dependencies = [ "cfg-if", "cpufeatures", @@ -1435,9 +1435,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1506,9 +1506,9 @@ dependencies = [ [[package]] name = "csv" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" dependencies = [ "csv-core", "itoa", @@ -1571,7 +1571,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1595,7 +1595,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1606,7 +1606,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1650,7 +1650,7 @@ checksum = "bc2323e10c92e1cf4d86e11538512e6dc03ceb586842970b6332af3d4046a046" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 
2.0.90", ] [[package]] @@ -1694,7 +1694,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1714,7 +1714,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "unicode-xid", ] @@ -1728,7 +1728,7 @@ dependencies = [ "fuzzy-matcher", "shell-words", "tempfile", - "thiserror 1.0.64", + "thiserror 1.0.69", "zeroize", ] @@ -1795,6 +1795,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "dotenv" version = "0.15.0" @@ -1888,9 +1899,9 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1921,7 +1932,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1990,7 +2001,7 @@ checksum = "d4291f0c7220b67ad15e9d5300ba2f215cee504f0924d60e77c9d1c77e7a69b1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2001,12 +2012,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = 
"33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2027,7 +2038,7 @@ dependencies = [ "serde_json", "sha2", "sha3", - "thiserror 1.0.64", + "thiserror 1.0.69", "uuid 0.8.2", ] @@ -2057,7 +2068,7 @@ dependencies = [ "serde", "serde_json", "sha3", - "thiserror 1.0.64", + "thiserror 1.0.69", "uint", ] @@ -2161,7 +2172,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -2180,7 +2191,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] @@ -2201,7 +2212,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "toml", "walkdir", ] @@ -2225,7 +2236,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "toml", "walkdir", ] @@ -2242,7 +2253,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2258,7 +2269,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2283,9 +2294,9 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.89", + "syn 2.0.90", "tempfile", - "thiserror 1.0.64", + "thiserror 1.0.69", "tiny-keccak", "unicode-xid", ] @@ -2313,9 +2324,9 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.89", + "syn 2.0.90", "tempfile", - "thiserror 1.0.64", + "thiserror 1.0.69", "tiny-keccak", "unicode-xid", ] @@ -2331,7 +2342,7 @@ dependencies = [ "semver 1.0.23", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", ] @@ -2347,7 +2358,7 @@ dependencies = [ "semver 1.0.23", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", ] @@ -2369,7 +2380,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tracing", 
"tracing-futures", @@ -2396,7 +2407,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tracing", "tracing-futures", @@ -2427,7 +2438,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tokio-tungstenite", "tracing", @@ -2464,7 +2475,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 1.0.69", "tokio", "tokio-tungstenite", "tracing", @@ -2490,7 +2501,7 @@ dependencies = [ "ethers-core 2.0.13", "rand", "sha2", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", ] @@ -2509,7 +2520,7 @@ dependencies = [ "ethers-core 2.0.14", "rand", "sha2", - "thiserror 1.0.64", + "thiserror 1.0.69", "tracing", ] @@ -2536,7 +2547,7 @@ dependencies = [ "serde_json", "solang-parser", "svm-rs", - "thiserror 1.0.64", + "thiserror 1.0.69", "tiny-keccak", "tokio", "tracing", @@ -2568,7 +2579,7 @@ dependencies = [ "serde_json", "solang-parser", "svm-rs", - "thiserror 1.0.64", + "thiserror 1.0.69", "tiny-keccak", "tokio", "tracing", @@ -2594,9 +2605,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "fastrlp" @@ -2673,9 +2684,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -2809,7 +2820,7 @@ checksum = 
"162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2973,7 +2984,7 @@ dependencies = [ "revm", "serde", "serde_json", - "serial_test 3.1.1", + "serial_test 3.2.0", "sha2", "verifiable-db", ] @@ -3001,7 +3012,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.6.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -3034,9 +3045,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", @@ -3195,9 +3206,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes", "futures-channel", @@ -3219,9 +3230,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", @@ -3244,7 +3255,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "rustls", "tokio", "tokio-rustls", @@ -3257,7 +3268,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.30", + "hyper 0.14.31", "native-tls", "tokio", "tokio-native-tls", @@ -3271,7 +3282,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "native-tls", "tokio", @@ -3281,16 +3292,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.1", - "hyper 1.4.1", + "hyper 1.5.1", "pin-project-lite", "socket2", "tokio", @@ -3321,6 +3332,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -3329,12 +3458,23 @@ checksum = 
"b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -3366,13 +3506,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -3394,12 +3534,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "serde", ] @@ -3490,9 +3630,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jammdb" @@ -3521,10 +3661,11 @@ dependencies = [ [[package]] 
name = "js-sys" -version = "0.3.72" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -3622,7 +3763,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.8", + "regex-automata 0.4.9", ] [[package]] @@ -3636,15 +3777,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.159" +version = "0.2.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libredox" @@ -3662,6 +3803,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -3684,7 +3831,7 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.0", + "hashbrown 0.15.2", ] [[package]] @@ -3744,11 +3891,10 @@ dependencies = [ [[package]] name = 
"mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", @@ -3765,7 +3911,7 @@ dependencies = [ "eth_trie", "ethereum-types", "ethers 2.0.13", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "hex", "itertools 0.13.0", "log", @@ -3817,7 +3963,7 @@ dependencies = [ "envconfig", "eth_trie", "futures", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "hex", "itertools 0.13.0", "jammdb", @@ -3837,7 +3983,7 @@ dependencies = [ "ryhope", "serde", "serde_json", - "serial_test 3.1.1", + "serial_test 3.2.0", "sqlparser", "test-log", "testfile", @@ -4011,7 +4157,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4056,9 +4202,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -4077,7 +4223,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4088,9 +4234,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -4149,14 +4295,14 @@ 
dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -4267,7 +4413,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", - "thiserror 1.0.64", + "thiserror 1.0.69", "ucd-trie", ] @@ -4278,7 +4424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.7.0", ] [[package]] @@ -4321,7 +4467,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4344,29 +4490,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -4556,7 +4702,7 @@ dependencies = [ "starkyx", "tokio", "tracing", - "uuid 1.10.0", + "uuid 1.11.0", ] [[package]] @@ -4566,7 +4712,7 @@ source = "git+https://github.com/Lagrange-Labs/succinctx?branch=fix-build#8580a6 dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4636,12 +4782,12 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "prettyplease" -version = "0.2.22" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4720,14 +4866,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "proc-macro2" -version = "1.0.91" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -4855,7 +5001,7 @@ dependencies = [ "plonky2_monolith", "rstest 0.23.0", "serde", - "serial_test 3.1.1", + "serial_test 3.2.0", ] [[package]] @@ -4875,18 +5021,18 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.15", "libredox", - "thiserror 1.0.64", + "thiserror 1.0.69", ] [[package]] name = "regex" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +checksum = 
"b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -4901,9 +5047,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -4942,7 +5088,7 @@ dependencies = [ "h2", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls", "hyper-tls 0.5.0", "ipnet", @@ -4974,9 +5120,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -4985,7 +5131,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -5000,7 +5146,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", "tower-service", @@ -5185,7 +5331,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.90", "unicode-ident", ] @@ -5210,7 +5356,7 @@ dependencies = [ "rlp", "ruint-macro", "serde", - "thiserror 1.0.64", + "thiserror 1.0.69", "valuable", "zeroize", ] @@ -5229,9 +5375,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" [[package]] name = "rustc-hex" @@ -5259,9 +5405,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -5383,42 +5529,42 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.11.3" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" +checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b" dependencies = [ "cfg-if", - "derive_more 0.99.18", + "derive_more 1.0.0", "parity-scale-codec", "scale-info-derive", ] [[package]] name = "scale-info-derive" -version = "2.11.3" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" +checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] name = "scc" -version = "2.2.2" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c1f7fc6deb21665a9060dfc7d271be784669295a31babdcd4dd2c79ae8cbfb" +checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = 
"1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -5497,9 +5643,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -5525,9 +5671,9 @@ dependencies = [ [[package]] name = "semver-parser" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" dependencies = [ "pest", ] @@ -5546,29 +5692,29 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", 
"memchr", @@ -5632,7 +5778,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_derive", "serde_json", @@ -5649,7 +5795,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5661,7 +5807,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5680,16 +5826,16 @@ dependencies = [ [[package]] name = "serial_test" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" +checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9" dependencies = [ "futures", "log", "once_cell", "parking_lot", "scc", - "serial_test_derive 3.1.1", + "serial_test_derive 3.2.0", ] [[package]] @@ -5705,13 +5851,13 @@ dependencies = [ [[package]] name = "serial_test_derive" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" +checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5817,7 +5963,7 @@ checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint 0.4.6", "num-traits", - "thiserror 1.0.64", + "thiserror 1.0.69", "time", ] @@ -5855,9 +6001,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5873,7 +6019,7 @@ dependencies = [ 
"lalrpop", "lalrpop-util", "phf", - "thiserror 1.0.64", + "thiserror 1.0.69", "unicode-xid", ] @@ -5908,6 +6054,12 @@ dependencies = [ "log", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "starkyx" version = "0.1.0" @@ -5995,7 +6147,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6041,7 +6193,7 @@ dependencies = [ "serde", "serde_json", "sha2", - "thiserror 1.0.64", + "thiserror 1.0.69", "url", "zip", ] @@ -6059,9 +6211,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", @@ -6070,14 +6222,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f76fe0a3e1476bdaa0775b9aec5b869ed9520c2b2fedfe9c6df3618f8ea6290b" +checksum = "da0523f59468a2696391f2a772edc089342aacd53c3caa2ac3264e598edf119b" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6088,13 +6240,24 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -6149,9 +6312,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -6199,7 +6362,7 @@ checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6213,11 +6376,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl 1.0.64", + "thiserror-impl 1.0.69", ] [[package]] @@ -6231,13 +6394,13 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6248,7 +6411,7 @@ checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6310,6 +6473,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" 
+version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -6327,9 +6500,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", @@ -6351,7 +6524,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6467,7 +6640,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_spanned", "toml_datetime", @@ -6502,9 +6675,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -6513,20 +6686,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", 
- "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -6555,9 +6728,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -6591,7 +6764,7 @@ dependencies = [ "rand", "rustls", "sha1", - "thiserror 1.0.64", + "thiserror 1.0.69", "url", "utf-8", ] @@ -6634,9 +6807,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -6689,9 +6862,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -6704,6 +6877,18 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -6722,9 +6907,9 @@ dependencies = [ [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "serde", ] @@ -6762,7 +6947,7 @@ dependencies = [ "recursion_framework", "ryhope", "serde", - "serial_test 3.1.1", + "serial_test 3.2.0", "tokio", ] @@ -6841,9 +7026,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" dependencies = [ "cfg-if", "once_cell", @@ -6852,36 +7037,37 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "9dfaf8f50e5f293737ee323940c7d8b08a66a95a419223d9f41610ca08b0833d" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6889,22 +7075,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" [[package]] name = "wasmtimer" @@ -6922,9 +7108,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "a98bc3c33f0fe7e59ad7cd041b89034fa82a7c2d4365ca538dda6cdaf513863c" dependencies = [ "js-sys", "wasm-bindgen", @@ -7194,6 +7380,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "ws_stream_wasm" version = "0.7.4" @@ -7207,7 +7405,7 @@ dependencies = [ "pharos", "rustc_version 0.4.1", "send_wrapper 0.6.0", - "thiserror 1.0.64", + "thiserror 1.0.69", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -7228,6 +7426,30 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -7246,7 +7468,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", + 
"synstructure", ] [[package]] @@ -7266,7 +7509,29 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", ] [[package]] diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index ba863d475..779199dc4 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1,18 +1,25 @@ //! Module containing several structure definitions for Ethereum related operations //! such as fetching blocks, transactions, creating MPTs, getting proofs, etc. 
use alloy::{ + consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, eips::BlockNumberOrTag, + network::{eip2718::Encodable2718, TransactionResponse}, primitives::{Address, B256}, providers::{Provider, RootProvider}, rlp::Encodable as AlloyEncodable, - rpc::types::{Block, EIP1186AccountProofResponse}, - transports::Transport, + rpc::types::{ + Block, BlockTransactions, EIP1186AccountProofResponse, ReceiptEnvelope, Transaction, + }, + transports::{ + http::{Client, Http}, + Transport, + }, }; -use anyhow::{bail, Result}; +use anyhow::{bail, Context, Result}; use eth_trie::{EthTrie, MemoryDB, Trie}; use ethereum_types::H256; use log::warn; -use rlp::Rlp; +use rlp::{Encodable, Rlp}; use serde::{Deserialize, Serialize}; use std::{array::from_fn as create_array, sync::Arc}; @@ -21,7 +28,7 @@ use crate::{mpt_sequential::utils::bytes_to_nibbles, rlp::MAX_KEY_NIBBLE_LEN, ut /// Retry number for the RPC request const RETRY_NUM: usize = 3; -pub trait BlockUtil { +pub trait Rlpable { fn block_hash(&self) -> Vec { keccak256(&self.rlp()) } @@ -252,7 +259,7 @@ impl ProofQuery { } } -impl BlockUtil for alloy::rpc::types::Block { +impl Rlpable for alloy::rpc::types::Block { fn rlp(&self) -> Vec { let mut out = Vec::new(); self.header.encode(&mut out); @@ -260,7 +267,13 @@ impl BlockUtil for alloy::rpc::types::Block { } } -impl BlockUtil for alloy::rpc::types::Header { +impl Rlpable for alloy::rpc::types::Header { + fn rlp(&self) -> Vec { + self.inner.rlp() + } +} + +impl Rlpable for alloy::consensus::Header { fn rlp(&self) -> Vec { let mut out = Vec::new(); self.encode(&mut out); @@ -268,6 +281,265 @@ impl BlockUtil for alloy::rpc::types::Header { } } +pub struct BlockUtil { + pub block: Block, + pub txs: Vec, + pub receipts_trie: EthTrie, +} + +pub struct TxWithReceipt(Transaction, ReceiptEnvelope); +impl TxWithReceipt { + pub fn receipt(&self) -> ReceiptEnvelope { + self.1.clone() + } +} + +impl BlockUtil { + pub async fn fetch(t: RootProvider>, id: BlockNumberOrTag) -> Result 
{ + let block = t + .get_block(id.into(), alloy::rpc::types::BlockTransactionsKind::Full) + .await? + .context("can't get block")?; + let receipts = t + .get_block_receipts(id.into()) + .await? + .context("can't get receipts")?; + let BlockTransactions::Full(all_tx) = block.transactions.clone() else { + bail!("can't see full transactions"); + }; + let tx_receipts: Vec<(_, _)> = receipts + .into_iter() + .map(|receipt| { + ( + all_tx + .iter() + .find(|tx| tx.tx_hash() == receipt.transaction_hash) + .expect("no tx with receipt hash") + .clone(), + receipt, + ) + }) + .collect(); + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + let consensus_receipts = tx_receipts + .into_iter() + .map(|tr| { + let receipt = tr.1; + let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); + //let mut buff = Vec::new(); + let receipt_primitive = receipt.inner.clone(); + let receipt_primitive = match receipt_primitive { + CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(&r)), + CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(&r)), + CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(&r)), + CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(&r)), + CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(&r)), + _ => panic!("aie"), + }; + let body_rlp = receipt_primitive.encoded_2718(); + + receipts_trie + .insert(&tx_index, &body_rlp) + .expect("can't insert tx"); + TxWithReceipt(tr.0, receipt_primitive) + }) + .collect::>(); + Ok(BlockUtil { + block, + txs: consensus_receipts, + receipts_trie, + }) + } + + // recompute the receipts trie by first converting all receipts form RPC type to consensus type + // since in Alloy these are two different types and RLP functions are only implemented for + // consensus ones. 
+ // TODO: transaction trie + fn check(&mut self) -> Result<()> { + let computed = self.receipts_trie.root_hash().expect("root hash problem"); + let expected = self.block.header.receipts_root; + assert_eq!(expected.to_vec(), computed.0.to_vec()); + Ok(()) + } +} + +fn from_rpc_logs_to_consensus( + r: &ReceiptWithBloom, +) -> ReceiptWithBloom { + ReceiptWithBloom { + logs_bloom: r.logs_bloom, + receipt: alloy::consensus::Receipt { + status: r.receipt.status, + cumulative_gas_used: r.receipt.cumulative_gas_used, + logs: r + .receipt + .logs + .iter() + .map(|l| alloy::primitives::Log { + address: l.inner.address, + data: l.inner.data.clone(), + }) + .collect(), + }, + } +} + +// for compatibility check with alloy +#[cfg(test)] +mod tryethers { + + use std::sync::Arc; + + use anyhow::Result; + use eth_trie::{EthTrie, MemoryDB, Trie}; + use ethers::{ + providers::{Http, Middleware, Provider}, + types::{ + Address, Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, + H256, U64, + }, + }; + use rlp::{Encodable, Rlp, RlpStream}; + + /// A wrapper around a transaction and its receipt. The receipt is used to filter + /// bad transactions, so we only compute over valid transactions. 
+ pub struct TxAndReceipt(Transaction, TransactionReceipt); + + impl TxAndReceipt { + pub fn tx(&self) -> &Transaction { + &self.0 + } + pub fn receipt(&self) -> &TransactionReceipt { + &self.1 + } + pub fn tx_rlp(&self) -> Bytes { + self.0.rlp() + } + // TODO: this should be upstreamed to ethers-rs + pub fn receipt_rlp(&self) -> Bytes { + let tx_type = self.tx().transaction_type; + let mut rlp = RlpStream::new(); + rlp.begin_unbounded_list(); + match &self.1.status { + Some(s) if s.as_u32() == 1 => rlp.append(s), + _ => rlp.append_empty_data(), + }; + rlp.append(&self.1.cumulative_gas_used) + .append(&self.1.logs_bloom) + .append_list(&self.1.logs); + + rlp.finalize_unbounded_list(); + let rlp_bytes: Bytes = rlp.out().freeze().into(); + let mut encoded = vec![]; + match tx_type { + // EIP-2930 (0x01) + Some(x) if x == U64::from(0x1) => { + encoded.extend_from_slice(&[0x1]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + // EIP-1559 (0x02) + Some(x) if x == U64::from(0x2) => { + encoded.extend_from_slice(&[0x2]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + _ => rlp_bytes, + } + } + } + /// Structure containing the block header and its transactions / receipts. Amongst other things, + /// it is used to create a proof of inclusion for any transaction inside this block. + pub struct BlockData { + pub block: ethers::types::Block, + pub txs: Vec, + // TODO: add generics later - this may be re-used amongst different workers + pub tx_trie: EthTrie, + pub receipts_trie: EthTrie, + } + + impl BlockData { + pub async fn fetch + Send + Sync>( + blockid: T, + url: String, + ) -> Result { + let provider = + Provider::::try_from(url).expect("could not instantiate HTTP Provider"); + Self::fetch_from(&provider, blockid).await + } + pub async fn fetch_from + Send + Sync>( + provider: &Provider, + blockid: T, + ) -> Result { + let block = provider + .get_block_with_txs(blockid) + .await? 
+ .expect("should have been a block"); + let receipts = provider.get_block_receipts(block.number.unwrap()).await?; + + let tx_with_receipt = block + .transactions + .clone() + .into_iter() + .map(|tx| { + let tx_hash = tx.hash(); + let r = receipts + .iter() + .find(|r| r.transaction_hash == tx_hash) + .expect("RPC sending invalid data"); + // TODO remove cloning + TxAndReceipt(tx, r.clone()) + }) + .collect::>(); + + // check transaction root + let memdb = Arc::new(MemoryDB::new(true)); + let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + tx_trie + .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx().rlp()) + .expect("can't insert tx"); + } + + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + if tr.tx().transaction_index.unwrap() == U64::from(0) { + println!( + "Ethers: Index {} -> {}", + tr.tx().transaction_index.unwrap(), + hex::encode(tr.receipt_rlp()) + ); + } + receipts_trie + .insert( + &tr.receipt().transaction_index.rlp_bytes(), + // TODO: make getter value for rlp encoding + &tr.receipt_rlp(), + ) + .expect("can't insert tx"); + } + let computed = tx_trie.root_hash().expect("root hash problem"); + let expected = block.transactions_root; + assert_eq!(expected, computed); + + let computed = receipts_trie.root_hash().expect("root hash problem"); + let expected = block.receipts_root; + assert_eq!(expected, computed); + + Ok(BlockData { + block, + tx_trie, + receipts_trie, + txs: tx_with_receipt, + }) + } + } +} + #[cfg(test)] mod test { #[cfg(feature = "ci")] @@ -282,39 +554,87 @@ mod test { }; use hashbrown::HashMap; - use crate::{ - types::MAX_BLOCK_LEN, - utils::{Endianness, Packer}, - }; + use crate::utils::{Endianness, Packer}; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; + use super::*; + #[tokio::test] - #[ignore] - async fn test_rlp_andrus() -> Result<()> { + async fn 
test_block_receipt_trie() -> Result<()> { let url = get_sepolia_url(); - let block_number1 = 5674446; - let block_number2 = block_number1 + 1; + // get some tx and receipt let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block = provider - .get_block(BlockNumberOrTag::Number(block_number1).into(), false.into()) - .await? - .unwrap(); - let comp_hash = keccak256(&block.rlp()); - let block_next = provider - .get_block(BlockNumberOrTag::from(block_number2).into(), false.into()) - .await? - .unwrap(); - let exp_hash = block_next.header.parent_hash; - assert!(comp_hash == exp_hash.as_slice()); - assert!( - block.rlp().len() <= MAX_BLOCK_LEN, - " rlp len = {}", - block.rlp().len() + let bn = 6893107; + let bna = BlockNumberOrTag::Number(bn); + let mut block = BlockUtil::fetch(provider, bna).await?; + // check if we compute the RLP correctly now + block.check()?; + let mut be = tryethers::BlockData::fetch(bn, url).await?; + let er = be.receipts_trie.root_hash()?; + let ar = block.receipts_trie.root_hash()?; + assert_eq!(er, ar); + // dissect one receipt entry in the trie + let tx_receipt = block.txs.first().clone().unwrap(); + // https://sepolia.etherscan.io/tx/0x9bef12fafd3962b0e0d66b738445d6ea2c1f3daabe10c889bd1916acc75d698b#eventlog + println!( + "Looking at tx hash on sepolia: {}", + hex::encode(tx_receipt.0.tx_hash()) ); + // in the MPT trie it's + // RLP ( RLP(Index), RLP ( LOGS )) + // the second component is done like that: + // + let rlp_encoding = tx_receipt.receipt().encoded_2718(); + let state = rlp::Rlp::new(&rlp_encoding); + assert!(state.is_list()); + // index 0 -> status, + // index 1 -> gas used + // index 2 -> logs_bloom + // index 3 -> logs + let logs_state = state.at(3).context("can't access logs field3")?; + assert!(logs_state.is_list()); + // there should be only one log for this tx + let log_state = logs_state.at(0).context("can't access first log")?; + assert!(log_state.is_list()); + // log: + // 0: address where it has 
been emitted + // 1: Topics (4 topics max, with 1 mandatory, the event sig) + // 2: Bytes32 array + let log_address: Vec = log_state.val_at(0).context("can't decode address")?; + let hex_address = hex::encode(&log_address); + assert_eq!( + hex_address, + "BBd3EDd4D3b519c0d14965d9311185CFaC8c3220".to_lowercase(), + ); + let topics: Vec> = log_state.list_at(1).context("can't decode topics")?; + // Approval (index_topic_1 address owner, index_topic_2 address approved, index_topic_3 uint256 tokenId)View Source + // first topic is signature of the event keccak(fn_name,args...) + let expected_sig = "8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925"; + let found_sig = hex::encode(&topics[0]); + assert_eq!(expected_sig, found_sig); + // second topic is owner + let expected_owner = hex::encode(left_pad32(&hex::decode( + "66d2F437a12d8f9f340C226b1EDC605124e763A6", + )?)); + let found_owner = hex::encode(&topics[1]); + assert_eq!(expected_owner, found_owner); + // third topic is approved + let expected_approved = hex::encode(left_pad32(&hex::decode( + "094f1570A8B5fc99d6756aD54DF0Fd6906795cd3", + )?)); + let found_approved = hex::encode(left_pad32(&topics[2])); + assert_eq!(expected_approved, found_approved); + // final is tokenid - not in topic + let expected_data = "000000000000000000000000000000000000000000115eec47f6cf7e35000000"; + let log_data: Vec = log_state.val_at(2).context("can't decode log data")?; + let found_data = hex::encode(&left_pad32( + &log_data.into_iter().take(32).collect::>(), + )); + assert_eq!(expected_data, found_data); + Ok(()) } - use super::*; #[tokio::test] async fn test_sepolia_slot() -> Result<()> { #[cfg(feature = "ci")] @@ -504,7 +824,7 @@ mod test { let previous_block = provider .get_block_by_number( BlockNumberOrTag::Number(block.header.number - 1), - true.into(), + alloy::rpc::types::BlockTransactionsKind::Full, ) .await? 
.unwrap(); @@ -567,7 +887,7 @@ mod test { } /// TEST to compare alloy with ethers pub struct RLPBlock<'a, X>(pub &'a ethers::types::Block); - impl BlockUtil for ethers::types::Block { + impl Rlpable for ethers::types::Block { fn rlp(&self) -> Vec { let rlp = RLPBlock(self); rlp::encode(&rlp).to_vec() diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index 0600285a8..ceb6df077 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -131,7 +131,7 @@ mod test { use mp2_common::{eth::left_pad_generic, u256, utils::ToFields, C, F}; use mp2_common::{ - eth::BlockUtil, + eth::Rlpable, types::CBuilder, utils::{Endianness, Packer}, D, diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index de6648f41..79ff29640 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -69,7 +69,7 @@ mod test { }; use anyhow::Result; use mp2_common::{ - eth::BlockUtil, + eth::Rlpable, proof::deserialize_proof, utils::{Endianness, FromFields, Packer, ToFields}, C, D, F, @@ -121,7 +121,11 @@ mod test { ); assert_eq!( U256::from_fields(pi.block_number_raw()), +<<<<<<< HEAD U256::from(block.header.number), +======= + U256::from(block.header.number) +>>>>>>> 6072e82 (test with receipts encoding) ); assert_eq!( pi.state_root_raw(), diff --git a/mp2-v1/tests/common/block_extraction.rs b/mp2-v1/tests/common/block_extraction.rs index 1bda85eba..933823e56 100644 --- a/mp2-v1/tests/common/block_extraction.rs +++ b/mp2-v1/tests/common/block_extraction.rs @@ -1,7 +1,7 @@ use alloy::primitives::U256; use anyhow::Result; use mp2_common::{ - eth::BlockUtil, + eth::{left_pad_generic, BlockUtil, Rlpable}, proof::deserialize_proof, utils::{Endianness, Packer, ToFields}, C, D, F, From 0922676de837825a314cf52162adbfe282727bc0 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Fri, 18 Oct 2024 16:28:08 +0200 Subject: [PATCH 02/15] wip --- mp2-common/src/eth.rs | 12 
++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 779199dc4..a6f9c06ae 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -581,9 +581,13 @@ mod test { hex::encode(tx_receipt.0.tx_hash()) ); // in the MPT trie it's - // RLP ( RLP(Index), RLP ( LOGS )) + // RLP ( RLP(Index), RLP ( DATA )) // the second component is done like that: - // + // DATA = RLP [ Rlp(status), Rlp(gas_used), Rlp(logs_bloom), Rlp(logs) ] + // it contains multiple logs so + // logs = RLP_LIST(RLP(logs[0]), RLP(logs[1])...) + // Each RLP(logs[0]) = RLP([ RLP(Address), RLP(topics), RLP(data)]) + // RLP(topics) is a list with up to 4 topics let rlp_encoding = tx_receipt.receipt().encoded_2718(); let state = rlp::Rlp::new(&rlp_encoding); assert!(state.is_list()); @@ -593,8 +597,7 @@ mod test { // index 3 -> logs let logs_state = state.at(3).context("can't access logs field3")?; assert!(logs_state.is_list()); - // there should be only one log for this tx - let log_state = logs_state.at(0).context("can't access first log")?; + let log_state = logs_state.at(0).context("can't access single log state")?; assert!(log_state.is_list()); // log: // 0: address where it has been emitted @@ -606,6 +609,7 @@ mod test { hex_address, "BBd3EDd4D3b519c0d14965d9311185CFaC8c3220".to_lowercase(), ); + // the topics are in a list let topics: Vec> = log_state.list_at(1).context("can't decode topics")?; // Approval (index_topic_1 address owner, index_topic_2 address approved, index_topic_3 uint256 tokenId)View Source // first topic is signature of the event keccak(fn_name,args...) 
From 82f304d78c5fa00c74f19fe85d5c57e574fad9b2 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Mon, 21 Oct 2024 21:29:05 +0200 Subject: [PATCH 03/15] further testing --- mp2-common/src/eth.rs | 65 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 4 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index a6f9c06ae..91ef3d5e2 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -340,6 +340,11 @@ impl BlockUtil { }; let body_rlp = receipt_primitive.encoded_2718(); + println!( + "TX index {} RLP encoded: {:?}", + receipt.transaction_index.unwrap(), + tx_index.to_vec() + ); receipts_trie .insert(&tx_index, &body_rlp) .expect("can't insert tx"); @@ -509,9 +514,9 @@ mod tryethers { for tr in tx_with_receipt.iter() { if tr.tx().transaction_index.unwrap() == U64::from(0) { println!( - "Ethers: Index {} -> {}", + "Ethers: Index {} -> {:?}", tr.tx().transaction_index.unwrap(), - hex::encode(tr.receipt_rlp()) + tr.receipt_rlp().to_vec() ); } receipts_trie @@ -546,7 +551,8 @@ mod test { use std::env; use std::str::FromStr; - use alloy::{primitives::Bytes, providers::ProviderBuilder}; + use alloy::{primitives::Bytes, providers::ProviderBuilder, rpc::types::BlockTransactionsKind}; + use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ providers::{Http, Middleware}, @@ -554,7 +560,11 @@ mod test { }; use hashbrown::HashMap; - use crate::utils::{Endianness, Packer}; + use crate::{ + mpt_sequential::utils::nibbles_to_bytes, + types::MAX_BLOCK_LEN, + utils::{Endianness, Packer}, + }; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; use super::*; @@ -589,14 +599,26 @@ mod test { // Each RLP(logs[0]) = RLP([ RLP(Address), RLP(topics), RLP(data)]) // RLP(topics) is a list with up to 4 topics let rlp_encoding = tx_receipt.receipt().encoded_2718(); + println!( + "Size of RLP encoded receipt in bytes: {}", + rlp_encoding.len() + ); let state = rlp::Rlp::new(&rlp_encoding); assert!(state.is_list()); // index 0 -> 
status, // index 1 -> gas used // index 2 -> logs_bloom // index 3 -> logs + let gas_used: Vec = state.val_at(1).context("can't access gas used")?; + println!("gas used byte length: {}", gas_used.len()); + let bloom: Vec = state.val_at(2).context("can't access bloom")?; + println!("bloom byte length: {}", bloom.len()); + //let logs: Vec> = state.list_at(3).context("can't access logs")?; + //println!("logs byte length: {}", logs.len()); + let logs_state = state.at(3).context("can't access logs field3")?; assert!(logs_state.is_list()); + println!("logs in hex: {}", hex::encode(logs_state.data()?)); let log_state = logs_state.at(0).context("can't access single log state")?; assert!(log_state.is_list()); // log: @@ -636,6 +658,41 @@ mod test { )); assert_eq!(expected_data, found_data); + let mpt_key = tx_receipt.0.transaction_index.unwrap(); + let proof = block + .receipts_trie + .get_proof(&mpt_key.rlp_bytes()) + .expect("can't retrieve mpt proof"); + let mpt_node = proof.last().unwrap(); + println!("MPT LEAF NODE: {:?}", mpt_node); + // First decode the top level header + let top_header = rlp::Rlp::new(mpt_node); + assert!(top_header.is_list()); + // then extract the buffer containing all elements (key and value) + let top_info = top_header.payload_info()?; + println!("TOP level header: {:?}", top_info); + let list_buff = &mpt_node[top_info.header_len..top_info.header_len + top_info.value_len]; + // then check the key and make sure it's equal to the RLP encoding of the index + let key_header = rlp::Rlp::new(list_buff); + assert!(!key_header.is_list()); + // key is RLP( compact ( RLP(index))) + let key_info = key_header.payload_info()?; + let compact_key = &list_buff[key_info.header_len..key_info.header_len + key_info.value_len]; + let decoded_key = rlp::encode(&nibbles_to_bytes( + Nibbles::from_compact(compact_key).nibbles(), + )); + assert_eq!(decoded_key, &mpt_key.rlp_bytes().to_vec(),); + + // then check if the value portion fits what we tested above + // value is 
RLP ( RLP(status, etc...)) + let outer_value_min = top_info.header_len + key_info.header_len + key_info.value_len; + let outer_value_buff = &mpt_node[outer_value_min..]; + let outer_value_state = rlp::Rlp::new(outer_value_buff); + assert!(!outer_value_state.is_list()); + let outer_payload = outer_value_state.payload_info()?; + let inner_value_min = outer_value_min + outer_payload.header_len; + let inner_value_buff = &mpt_node[inner_value_min..]; + assert_eq!(rlp_encoding, inner_value_buff); Ok(()) } From 375beb527f035c488f332b9676701c9523bad699 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Thu, 7 Nov 2024 11:00:36 +0000 Subject: [PATCH 04/15] WIP: Receipt Trie leaves --- Cargo.lock | 1 + mp2-common/src/array.rs | 185 ++++++- mp2-common/src/eth.rs | 483 +++++++++++++++-- mp2-common/src/group_hashing/mod.rs | 2 + mp2-common/src/mpt_sequential/key.rs | 26 +- .../src/mpt_sequential/leaf_or_extension.rs | 44 +- mp2-common/src/mpt_sequential/mod.rs | 309 ++++++----- mp2-common/src/rlp.rs | 24 +- mp2-test/Cargo.toml | 1 + mp2-test/src/mpt_sequential.rs | 152 ++++++ mp2-v1/src/contract_extraction/branch.rs | 18 +- mp2-v1/src/length_extraction/branch.rs | 8 +- mp2-v1/src/lib.rs | 1 + mp2-v1/src/receipt_extraction/leaf.rs | 510 ++++++++++++++++++ mp2-v1/src/receipt_extraction/mod.rs | 2 + .../src/receipt_extraction/public_inputs.rs | 76 +++ mp2-v1/src/values_extraction/branch.rs | 11 +- rustc-ice-2024-11-04T12_36_50-74186.txt | 63 +++ rustc-ice-2024-11-04T12_37_01-74253.txt | 62 +++ rustc-ice-2024-11-04T12_37_13-74307.txt | 62 +++ 20 files changed, 1828 insertions(+), 212 deletions(-) create mode 100644 mp2-v1/src/receipt_extraction/leaf.rs create mode 100644 mp2-v1/src/receipt_extraction/mod.rs create mode 100644 mp2-v1/src/receipt_extraction/public_inputs.rs create mode 100644 rustc-ice-2024-11-04T12_36_50-74186.txt create mode 100644 rustc-ice-2024-11-04T12_37_01-74253.txt create mode 100644 rustc-ice-2024-11-04T12_37_13-74307.txt diff --git a/Cargo.lock b/Cargo.lock 
index e2e45aa77..632e89037 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3946,6 +3946,7 @@ dependencies = [ "recursion_framework", "ryhope", "serde", + "tokio", ] [[package]] diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index ffcee5aa9..624e5e1bf 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -1,6 +1,9 @@ use crate::{ serialization::{deserialize_long_array, serialize_long_array}, - utils::{less_than_or_equal_to_unsafe, range_check_optimized, Endianness, PackerTarget}, + utils::{ + less_than_or_equal_to_unsafe, less_than_unsafe, range_check_optimized, Endianness, + PackerTarget, + }, }; use anyhow::{anyhow, Result}; use plonky2::{ @@ -600,6 +603,91 @@ where pub fn last(&self) -> T { self.arr[SIZE - 1] } + + /// This function allows you to search a larger [`Array`] by representing it as a number of + /// smaller [`Array`]s with size [`RANDOM_ACCESS_SIZE`], padding the final smaller array where required. + pub fn random_access_large_array, const D: usize>( + &self, + b: &mut CircuitBuilder, + at: Target, + ) -> T { + // We will split the array into smaller arrays of size 64, padding the last array with zeroes if required + let padded_size = (SIZE - 1) / RANDOM_ACCESS_SIZE + 1; + + // Create an array of `Array`s + let arrays: Vec> = (0..padded_size) + .map(|i| Array { + arr: create_array(|j| { + let index = 64 * i + j; + if index < self.arr.len() { + self.arr[index] + } else { + T::from_target(b.zero()) + } + }), + }) + .collect(); + + // We need to express `at` in base 64, we are also assuming that the initial array was smaller than 64^2 = 4096 which we enforce with a range check. + // We also check that `at` is smaller that the size of the array. 
+ let array_size = b.constant(F::from_noncanonical_u64(SIZE as u64)); + let less_than_check = less_than_unsafe(b, at, array_size, 12); + let true_target = b._true(); + b.connect(less_than_check.target, true_target.target); + b.range_check(at, 12); + let (low_bits, high_bits) = b.split_low_high(at, 6, 12); + + // Search each of the smaller arrays for the target at `low_bits` + let first_search = arrays + .into_iter() + .map(|array| { + b.random_access( + low_bits, + array + .arr + .iter() + .map(Targetable::to_target) + .collect::>(), + ) + }) + .collect::>(); + + // Serach the result for the Target at `high_bits` + T::from_target(b.random_access(high_bits, first_search)) + } + + /// Returns [`self[at..at+SUB_SIZE]`]. + /// This is more expensive than [`Self::extract_array`] due to using [`Self::random_access_large_array`] + /// instead of [`Self::value_at`]. This function enforces that the values extracted are within the array. + pub fn extract_array_large< + F: RichField + Extendable, + const D: usize, + const SUB_SIZE: usize, + >( + &self, + b: &mut CircuitBuilder, + at: Target, + ) -> Array { + let m = b.constant(F::from_canonical_usize(SUB_SIZE)); + let array_len = b.constant(F::from_canonical_usize(SIZE)); + let upper_bound = b.add(at, m); + let num_bits_size = SIZE.ilog2() + 1; + + let lt = less_than_or_equal_to_unsafe(b, upper_bound, array_len, num_bits_size as usize); + + let t = b._true(); + b.connect(t.target, lt.target); + + Array:: { + arr: core::array::from_fn(|i| { + let i_target = b.constant(F::from_canonical_usize(i)); + let i_plus_n_target = b.add(at, i_target); + + // out_val = arr[((i+n)<=n+M) * (i+n)] + self.random_access_large_array(b, i_plus_n_target) + }), + } + } } /// Returns the size of the array in 32-bit units, rounded up. 
#[allow(non_snake_case)] @@ -815,6 +903,51 @@ mod test { run_circuit::(ValueAtCircuit { arr, idx, exp }); } + #[test] + fn test_random_access_large_array() { + const SIZE: usize = 512; + #[derive(Clone, Debug)] + struct ValueAtCircuit { + arr: [u8; SIZE], + idx: usize, + exp: u8, + } + impl UserCircuit for ValueAtCircuit + where + F: RichField + Extendable, + { + type Wires = (Array, Target, Target); + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let array = Array::::new(c); + let exp_value = c.add_virtual_target(); + let index = c.add_virtual_target(); + let extracted = array.random_access_large_array(c, index); + c.connect(exp_value, extracted); + (array, index, exp_value) + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires + .0 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.arr[i]))); + pw.set_target(wires.1, F::from_canonical_usize(self.idx)); + pw.set_target(wires.2, F::from_canonical_u8(self.exp)); + } + } + let mut rng = thread_rng(); + let mut arr = [0u8; SIZE]; + rng.fill(&mut arr[..]); + let idx: usize = rng.gen_range(0..SIZE); + let exp = arr[idx]; + run_circuit::(ValueAtCircuit { arr, idx, exp }); + + // Now we check that it fails when the index is too large + let idx = SIZE; + let result = std::panic::catch_unwind(|| { + run_circuit::(ValueAtCircuit { arr, idx, exp }) + }); + assert!(result.is_err()); + } + #[test] fn test_extract_array() { const SIZE: usize = 80; @@ -858,6 +991,56 @@ mod test { run_circuit::(ExtractArrayCircuit { arr, idx, exp }); } + #[test] + fn test_extract_array_large() { + const SIZE: usize = 512; + const SUBSIZE: usize = 40; + #[derive(Clone, Debug)] + struct ExtractArrayCircuit { + arr: [u8; SIZE], + idx: usize, + exp: [u8; SUBSIZE], + } + impl UserCircuit for ExtractArrayCircuit + where + F: RichField + Extendable, + { + type Wires = (Array, Target, Array); + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let array = Array::::new(c); + let index = c.add_virtual_target(); + let 
expected = Array::::new(c); + let extracted = array.extract_array_large::<_, _, SUBSIZE>(c, index); + let are_equal = expected.equals(c, &extracted); + let tru = c._true(); + c.connect(are_equal.target, tru.target); + (array, index, expected) + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires + .0 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.arr[i]))); + pw.set_target(wires.1, F::from_canonical_usize(self.idx)); + wires + .2 + .assign(pw, &create_array(|i| F::from_canonical_u8(self.exp[i]))); + } + } + let mut rng = thread_rng(); + let mut arr = [0u8; SIZE]; + rng.fill(&mut arr[..]); + let idx: usize = rng.gen_range(0..(SIZE - SUBSIZE)); + let exp = create_array(|i| arr[idx + i]); + run_circuit::(ExtractArrayCircuit { arr, idx, exp }); + + // It should panic if we try to extract an array where some of the indices fall outside of (0..SIZE) + let idx = SIZE; + let result = std::panic::catch_unwind(|| { + run_circuit::(ExtractArrayCircuit { arr, idx, exp }) + }); + assert!(result.is_err()); + } + #[test] fn test_contains_subarray() { #[derive(Clone, Debug)] diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 91ef3d5e2..8b0e9226b 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -1,21 +1,19 @@ //! Module containing several structure definitions for Ethereum related operations //! such as fetching blocks, transactions, creating MPTs, getting proofs, etc. 
use alloy::{ - consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom}, + consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom, TxEnvelope}, eips::BlockNumberOrTag, - network::{eip2718::Encodable2718, TransactionResponse}, - primitives::{Address, B256}, + json_abi::Event, + network::{eip2718::Encodable2718, BlockResponse, TransactionResponse}, + primitives::{Address, Log, LogData, B256}, providers::{Provider, RootProvider}, rlp::Encodable as AlloyEncodable, rpc::types::{ - Block, BlockTransactions, EIP1186AccountProofResponse, ReceiptEnvelope, Transaction, - }, - transports::{ - http::{Client, Http}, - Transport, + Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, Transaction, }, + transports::Transport, }; -use anyhow::{bail, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use eth_trie::{EthTrie, MemoryDB, Trie}; use ethereum_types::H256; use log::warn; @@ -118,6 +116,175 @@ pub struct ProofQuery { pub(crate) slot: StorageSlot, } +/// Struct used for storing relevant data to query blocks as they come in. +#[derive(Debug, Clone)] +pub struct ReceiptQuery { + /// The contract that emits the event we care about + pub contract: Address, + /// The event we wish to monitor for, + pub event: Event, +} + +/// Struct used to store all the information needed for proving a leaf in the Receipt Trie is one we care about. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReceiptProofInfo { + /// The MPT proof that this Receipt is in the tree + pub mpt_proof: Vec>, + /// The index of this transaction in the block + pub tx_index: u64, + /// The size of the index in bytes + pub index_size: usize, + /// The offset in the leaf (in RLP form) to status + pub status_offset: usize, + /// The offset in the leaf (in RLP form) to the start of logs + pub logs_offset: usize, + /// Data about the type of log we are proving the existence of + pub event_log_info: EventLogInfo, + /// The offsets for the relevant logs + pub relevant_logs_offset: Vec, +} + +/// Contains all the information for an [`Event`] in rlp form +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +pub struct EventLogInfo { + /// Size in bytes of the whole log rlp encoded + pub size: usize, + /// Packed contract address to check + pub address: Address, + /// Byte offset for the address from the beginning of a Log + pub add_rel_offset: usize, + /// Packed event signature, + pub event_signature: [u8; 32], + /// Byte offset from the start of the log to event signature + pub sig_rel_offset: usize, + /// The topics for this Log + pub topics: [LogDataInfo; 3], + /// The extra data stored by this Log + pub data: [LogDataInfo; 2], +} + +/// Contains all the information for data contained in an [`Event`] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] +pub struct LogDataInfo { + pub column_id: usize, + /// The byte offset from the beggining of the log to this target + pub rel_byte_offset: usize, + /// The length of this topic/data + pub len: usize, +} + +impl TryFrom<&Log> for EventLogInfo { + type Error = anyhow::Error; + + fn try_from(log: &Log) -> std::result::Result { + // First we encode the log in rlp form + let mut buf = Vec::::new(); + log.encode(&mut buf); + + let rlp_log = rlp::Rlp::new(&buf); + // Extract the header + let log_header = rlp_log.payload_info()?; + let next_data = 
&buf[log_header.header_len..log_header.header_len + log_header.value_len]; + let rlp_log_no_header = rlp::Rlp::new(next_data); + // Find the address offset (skipping its header) + let address_header = rlp_log_no_header.payload_info()?; + let rel_address_offset = log_header.header_len + address_header.header_len; + // Find the signature offset (skipping its header) + let topics_data = &buf[rel_address_offset + address_header.value_len + ..log_header.header_len + log_header.value_len]; + let topics_rlp = rlp::Rlp::new(topics_data); + let topics_header = topics_rlp.payload_info()?; + let topic_0_data = + &buf[rel_address_offset + address_header.value_len + topics_header.header_len + ..log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topics_header.value_len]; + let topic_0_rlp = rlp::Rlp::new(topic_0_data); + let topic_0_header = topic_0_rlp.payload_info()?; + let rel_sig_offset = log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topic_0_header.header_len; + let event_signature: [u8; 32] = buf[rel_sig_offset..rel_sig_offset + 32].try_into()?; + // Each topic takes 33 bytes to encode so we divide this length by 33 to get the number of topics remaining + let remaining_topics = buf[rel_sig_offset + topic_0_header.value_len + ..log_header.header_len + + address_header.header_len + + address_header.value_len + + topics_header.header_len + + topics_header.value_len] + .len() + / 33; + + let mut topics = [LogDataInfo::default(); 3]; + let mut current_topic_offset = rel_sig_offset + topic_0_header.value_len + 1; + topics + .iter_mut() + .enumerate() + .take(remaining_topics) + .for_each(|(j, info)| { + *info = LogDataInfo { + column_id: j, + rel_byte_offset: current_topic_offset, + len: 32, + }; + current_topic_offset += 33; + }); + + // Deal with any remaining data + let mut data = [LogDataInfo::default(); 2]; + + let data_vec = if 
current_topic_offset < buf.len() { + buf.iter() + .skip(current_topic_offset - 1) + .copied() + .collect::>() + } else { + vec![] + }; + + if !data_vec.is_empty() { + let data_rlp = rlp::Rlp::new(&data_vec); + let data_header = data_rlp.payload_info()?; + // Since we can deal with at most two words of additional data we only need to take 66 bytes from this list + let mut additional_offset = data_header.header_len; + data_vec[data_header.header_len..] + .chunks(33) + .enumerate() + .take(2) + .try_for_each(|(j, chunk)| { + let chunk_rlp = rlp::Rlp::new(chunk); + let chunk_header = chunk_rlp.payload_info()?; + if chunk_header.value_len <= 32 { + data[j] = LogDataInfo { + column_id: 3 + j, + rel_byte_offset: current_topic_offset + + additional_offset + + chunk_header.header_len, + len: chunk_header.value_len, + }; + additional_offset += chunk_header.header_len + chunk_header.value_len; + } else { + return Ok(()); + } + Result::<(), anyhow::Error>::Ok(()) + })?; + } + Ok(EventLogInfo { + size: log_header.header_len + log_header.value_len, + address: log.address, + add_rel_offset: rel_address_offset, + event_signature, + sig_rel_offset: rel_sig_offset, + topics, + data, + }) + } +} + #[derive(Clone, Debug, Serialize, Deserialize)] pub enum StorageSlot { /// simple storage slot like a uin256 etc that fits in 32bytes @@ -259,6 +426,102 @@ impl ProofQuery { } } +impl ReceiptQuery { + pub fn new(contract: Address, event: Event) -> Self { + Self { contract, event } + } + + /// Function that returns the MPT Trie inclusion proofs for all receipts in a block whose logs contain + /// the specified event for the contract. 
+ pub async fn query_receipt_proofs( + &self, + provider: &RootProvider, + block: BlockNumberOrTag, + ) -> Result> { + let expected_topic_0 = B256::from_slice(&keccak256(self.event.signature().as_bytes())); + let filter = Filter::new() + .select(block) + .address(self.contract) + .event(&self.event.signature()); + let logs = provider.get_logs(&filter).await?; + // Find the length of the RLP encoded log + let event_log_info: EventLogInfo = (&logs + .first() + .ok_or(anyhow!("No relevant logs in this block"))? + .inner) + .try_into()?; + + // For each of the logs return the transacion its included in, then sort and remove duplicates. + let mut tx_indices = logs + .iter() + .map(|log| log.transaction_index) + .collect::>>() + .ok_or(anyhow!("One of the logs did not have a transaction index"))?; + tx_indices.sort(); + tx_indices.dedup(); + + // Construct the Receipt Trie for this block so we can retrieve MPT proofs. + let mut block_util = BlockUtil::fetch(provider, block).await?; + + let proofs = tx_indices + .into_iter() + .map(|index| { + let key = index.rlp_bytes(); + let index_size = key.len(); + let proof = block_util.receipts_trie.get_proof(&key)?; + let receipt = block_util.txs[index as usize].receipt(); + let rlp_body = receipt.encoded_2718(); + // Skip the first byte as it refers to the transaction type + let length_hint = rlp_body[1] as usize - 247; + + let status_offset = 2 + length_hint; + let gas_hint = rlp_body[3 + length_hint] as usize - 128; + // Logs bloom is always 256 bytes long and comes after the gas used the first byte is 185 then 1 then 0 then the bloom so the + // log data starts at 4 + length_hint + gas_hint + 259 + let log_offset = 4 + length_hint + gas_hint + 259; + + let log_hint = if rlp_body[log_offset] < 247 { + rlp_body[log_offset] as usize - 192 + } else { + rlp_body[log_offset] as usize - 247 + }; + // We iterate through the logs and store the offsets we care about. 
+ let mut current_log_offset = log_offset + 1 + log_hint; + + let relevant_logs = receipt + .logs() + .iter() + .filter_map(|log| { + let length = log.length(); + if log.address == self.contract + && log.data.topics().contains(&expected_topic_0) + { + let out = current_log_offset; + current_log_offset += length; + Some(out) + } else { + current_log_offset += length; + None + } + }) + .collect::>(); + + Ok(ReceiptProofInfo { + mpt_proof: proof, + tx_index: index, + index_size, + status_offset, + logs_offset: log_offset, + event_log_info, + relevant_logs_offset: relevant_logs, + }) + }) + .collect::, eth_trie::TrieError>>()?; + + Ok(proofs) + } +} + impl Rlpable for alloy::rpc::types::Block { fn rlp(&self) -> Vec { let mut out = Vec::new(); @@ -285,17 +548,24 @@ pub struct BlockUtil { pub block: Block, pub txs: Vec, pub receipts_trie: EthTrie, + pub transactions_trie: EthTrie, } pub struct TxWithReceipt(Transaction, ReceiptEnvelope); impl TxWithReceipt { - pub fn receipt(&self) -> ReceiptEnvelope { - self.1.clone() + pub fn receipt(&self) -> &ReceiptEnvelope { + &self.1 + } + pub fn transaction(&self) -> &Transaction { + &self.0 } } impl BlockUtil { - pub async fn fetch(t: RootProvider>, id: BlockNumberOrTag) -> Result { + pub async fn fetch( + t: &RootProvider, + id: BlockNumberOrTag, + ) -> Result { let block = t .get_block(id.into(), alloy::rpc::types::BlockTransactionsKind::Full) .await? @@ -304,42 +574,36 @@ impl BlockUtil { .get_block_receipts(id.into()) .await? 
.context("can't get receipts")?; - let BlockTransactions::Full(all_tx) = block.transactions.clone() else { + let BlockTransactions::Full(all_tx) = block.transactions() else { bail!("can't see full transactions"); }; - let tx_receipts: Vec<(_, _)> = receipts - .into_iter() - .map(|receipt| { - ( - all_tx - .iter() - .find(|tx| tx.tx_hash() == receipt.transaction_hash) - .expect("no tx with receipt hash") - .clone(), - receipt, - ) - }) - .collect(); // check receipt root let memdb = Arc::new(MemoryDB::new(true)); - let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); - let consensus_receipts = tx_receipts + let mut receipts_trie = EthTrie::new(memdb.clone()); + let mut transactions_trie = EthTrie::new(memdb.clone()); + let consensus_receipts = receipts .into_iter() - .map(|tr| { - let receipt = tr.1; + .zip(all_tx.into_iter()) + .map(|(receipt, transaction)| { let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); - //let mut buff = Vec::new(); - let receipt_primitive = receipt.inner.clone(); - let receipt_primitive = match receipt_primitive { - CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(&r)), - CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(&r)), - CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(&r)), - CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(&r)), - CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(&r)), + + let receipt_primitive = match receipt.inner { + CRE::Legacy(ref r) => CRE::Legacy(from_rpc_logs_to_consensus(r)), + CRE::Eip2930(ref r) => CRE::Eip2930(from_rpc_logs_to_consensus(r)), + CRE::Eip1559(ref r) => CRE::Eip1559(from_rpc_logs_to_consensus(r)), + CRE::Eip4844(ref r) => CRE::Eip4844(from_rpc_logs_to_consensus(r)), + CRE::Eip7702(ref r) => CRE::Eip7702(from_rpc_logs_to_consensus(r)), _ => panic!("aie"), }; + + let transaction_primitive = match TxEnvelope::try_from(transaction.clone()) { + Ok(t) => t, + _ => panic!("Couldn't get transaction envelope"), + }; 
+ let body_rlp = receipt_primitive.encoded_2718(); + let tx_body_rlp = transaction_primitive.encoded_2718(); println!( "TX index {} RLP encoded: {:?}", receipt.transaction_index.unwrap(), @@ -347,25 +611,31 @@ impl BlockUtil { ); receipts_trie .insert(&tx_index, &body_rlp) - .expect("can't insert tx"); - TxWithReceipt(tr.0, receipt_primitive) + .expect("can't insert receipt"); + transactions_trie + .insert(&tx_index, &tx_body_rlp) + .expect("can't insert transaction"); + TxWithReceipt(transaction.clone(), receipt_primitive) }) .collect::>(); Ok(BlockUtil { block, txs: consensus_receipts, receipts_trie, + transactions_trie, }) } // recompute the receipts trie by first converting all receipts form RPC type to consensus type // since in Alloy these are two different types and RLP functions are only implemented for // consensus ones. - // TODO: transaction trie fn check(&mut self) -> Result<()> { - let computed = self.receipts_trie.root_hash().expect("root hash problem"); + let computed = self.receipts_trie.root_hash()?; + let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; - assert_eq!(expected.to_vec(), computed.0.to_vec()); + let tx_expected = self.block.header.transactions_root; + assert_eq!(expected.0, computed.0); + assert_eq!(tx_expected.0, tx_computed.0); Ok(()) } } @@ -551,7 +821,13 @@ mod test { use std::env; use std::str::FromStr; - use alloy::{primitives::Bytes, providers::ProviderBuilder, rpc::types::BlockTransactionsKind}; + use alloy::{ + node_bindings::Anvil, + primitives::{Bytes, Log}, + providers::ProviderBuilder, + rlp::Decodable, + sol, + }; use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ @@ -576,7 +852,7 @@ mod test { let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let bn = 6893107; let bna = BlockNumberOrTag::Number(bn); - let mut block = BlockUtil::fetch(provider, bna).await?; + let mut block = BlockUtil::fetch(&provider, bna).await?; // check if we compute the 
RLP correctly now block.check()?; let mut be = tryethers::BlockData::fetch(bn, url).await?; @@ -696,6 +972,123 @@ mod test { Ok(()) } + #[tokio::test] + async fn test_receipt_query() -> Result<()> { + // Spin up a local node. + let anvil = Anvil::new().spawn(); + // Create a provider with the wallet for contract deployment and interaction. + let rpc_url = anvil.endpoint(); + + let rpc = ProviderBuilder::new().on_http(rpc_url.parse().unwrap()); + + // Make a contract taht emits events so we can pick up on them + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] + contract EventEmitter { + uint256 public number; + event testEvent(uint256 indexed num); + + function testEmit() public { + emit testEvent(number); + increment(); + } + + function twoEmits() 
public { + emit testEvent(number); + increment(); + emit testEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + // Deploy the contract using anvil + let contract = EventEmitter::deploy(&rpc).await?; + + // Fire off a few transactions to emit some events + let mut transactions = Vec::::new(); + + for i in 0..10 { + if i % 2 == 0 { + let builder = contract.testEmit(); + let tx_hash = builder.send().await?.watch().await?; + let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); + transactions.push(transaction); + } else { + let builder = contract.twoEmits(); + let tx_hash = builder.send().await?.watch().await?; + let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); + transactions.push(transaction); + } + } + + // We want to get the event signature so we can make a ReceiptQuery + let all_events = EventEmitter::abi::events(); + + let events = all_events.get("testEvent").unwrap(); + let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); + + // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it + for transaction in transactions.iter() { + let index = transaction + .block_number + .ok_or(anyhow!("Could not get block number from transaction"))?; + let block = rpc + .get_block( + BlockNumberOrTag::Number(index).into(), + alloy::rpc::types::BlockTransactionsKind::Full, + ) + .await? + .ok_or(anyhow!("Could not get block test"))?; + let proofs = receipt_query + .query_receipt_proofs(&rpc, BlockNumberOrTag::Number(index)) + .await?; + + for proof in proofs.into_iter() { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = transaction.transaction_index.unwrap().rlp_bytes(); + let receipt_hash = block.header().receipts_root; + let is_valid = tx_trie + .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? 
+ .ok_or(anyhow!("No proof found when verifying"))?; + + let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + .try_into() + .unwrap(); + + for log_offset in proof.relevant_logs_offset.iter() { + let mut buf = &is_valid[*log_offset..*log_offset + proof.event_log_info.size]; + let decoded_log = Log::decode(&mut buf)?; + let raw_bytes: [u8; 20] = is_valid[*log_offset + + proof.event_log_info.add_rel_offset + ..*log_offset + proof.event_log_info.add_rel_offset + 20] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(decoded_log.address, receipt_query.contract); + assert_eq!(raw_bytes, receipt_query.contract); + let topics = decoded_log.topics(); + assert_eq!(topics[0].0, expected_sig); + let raw_bytes: [u8; 32] = is_valid[*log_offset + + proof.event_log_info.sig_rel_offset + ..*log_offset + proof.event_log_info.sig_rel_offset + 32] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(topics[0].0, raw_bytes); + } + } + } + Ok(()) + } + #[tokio::test] async fn test_sepolia_slot() -> Result<()> { #[cfg(feature = "ci")] diff --git a/mp2-common/src/group_hashing/mod.rs b/mp2-common/src/group_hashing/mod.rs index 819eb7c2b..05c0d34ca 100644 --- a/mp2-common/src/group_hashing/mod.rs +++ b/mp2-common/src/group_hashing/mod.rs @@ -21,6 +21,8 @@ use plonky2_ecgfp5::{ }, }; +use std::array::from_fn as create_array; + mod curve_add; pub mod field_to_curve; mod sswu_gadget; diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index d7129fd84..f98b57aac 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -15,25 +15,37 @@ use plonky2::{ use plonky2_crypto::u32::arithmetic_u32::U32Target; use serde::{Deserialize, Serialize}; +pub type MPTKeyWire = MPTKeyWireGeneric; + +pub type ReceiptKeyWire = MPTKeyWireGeneric; + +pub const MAX_TX_KEY_NIBBLE_LEN: usize = 6; + /// Calculate the pointer from the MPT key. 
pub fn mpt_key_ptr(mpt_key: &[u8]) -> usize { let nibbles = Nibbles::from_compact(mpt_key); MAX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() } +/// Calculate the pointer from the MPT key. +pub fn receipt_key_ptr(mpt_key: &[u8]) -> usize { + let nibbles = Nibbles::from_compact(mpt_key); + MAX_TX_KEY_NIBBLE_LEN - 1 - nibbles.nibbles().len() +} + /// A structure that keeps a running pointer to the portion of the key the circuit /// already has proven. #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] -pub struct MPTKeyWire { +pub struct MPTKeyWireGeneric { /// Represents the full key of the value(s) we're looking at in the MPT trie. - pub key: Array, + pub key: Array, /// Represents which portion of the key we already processed. The pointer /// goes _backwards_ since circuit starts proving from the leaf up to the root. /// i.e. pointer must be equal to F::NEG_ONE when we reach the root. pub pointer: Target, } -impl MPTKeyWire { +impl MPTKeyWireGeneric { pub fn current_nibble, const D: usize>( &self, b: &mut CircuitBuilder, @@ -72,7 +84,7 @@ impl MPTKeyWire { /// Create a new fresh key wire pub fn new, const D: usize>(b: &mut CircuitBuilder) -> Self { Self { - key: Array::::new(b), + key: Array::::new(b), pointer: b.add_virtual_target(), } } @@ -80,7 +92,7 @@ impl MPTKeyWire { pub fn assign( &self, p: &mut PartialWitness, - key_nibbles: &[u8; MAX_KEY_NIBBLE_LEN], + key_nibbles: &[u8; KEY_LENGTH], ptr: usize, ) { let f_nibbles = create_array(|i| F::from_canonical_u8(key_nibbles[i])); @@ -141,7 +153,7 @@ impl MPTKeyWire { // now we need to pack each pair of 2 bit limbs into a nibble, but for each byte we want nibbles to // be ordered in big-endian limbs - .chunks(4) + .chunks_exact(4) .flat_map(|chunk| { vec![ b.mul_const_add(F::from_canonical_u8(4), chunk[3], chunk[2]), @@ -154,7 +166,7 @@ impl MPTKeyWire { .try_into() .unwrap(), }, - pointer: b.constant(F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1)), + pointer: 
b.constant(F::from_canonical_usize(KEY_LENGTH - 1)), } } } diff --git a/mp2-common/src/mpt_sequential/leaf_or_extension.rs b/mp2-common/src/mpt_sequential/leaf_or_extension.rs index 96b3b6355..8c64d7584 100644 --- a/mp2-common/src/mpt_sequential/leaf_or_extension.rs +++ b/mp2-common/src/mpt_sequential/leaf_or_extension.rs @@ -1,10 +1,10 @@ //! MPT leaf or extension node gadget -use super::{Circuit as MPTCircuit, MPTKeyWire, PAD_LEN}; +use super::{advance_key_leaf_or_extension, key::MPTKeyWireGeneric, PAD_LEN}; use crate::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires}, - rlp::decode_fixed_list, + rlp::{decode_fixed_list, MAX_KEY_NIBBLE_LEN}, types::GFp, }; use plonky2::{ @@ -15,10 +15,16 @@ use plonky2::{ }; use serde::{Deserialize, Serialize}; +pub type MPTLeafOrExtensionWires = + MPTLeafOrExtensionWiresGeneric; + /// Wrapped wires for a MPT leaf or extension node #[derive(Clone, Debug, Serialize, Deserialize)] -pub struct MPTLeafOrExtensionWires -where +pub struct MPTLeafOrExtensionWiresGeneric< + const NODE_LEN: usize, + const VALUE_LEN: usize, + const KEY_LEN: usize, +> where [(); PAD_LEN(NODE_LEN)]:, { /// MPT node @@ -26,12 +32,13 @@ where /// MPT root pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, /// New MPT key after advancing the current key - pub key: MPTKeyWire, + pub key: MPTKeyWireGeneric, /// New MPT value pub value: Array, } -impl MPTLeafOrExtensionWires +impl + MPTLeafOrExtensionWiresGeneric where [(); PAD_LEN(NODE_LEN)]:, { @@ -41,10 +48,12 @@ where } } +pub type MPTLeafOrExtensionNode = MPTLeafOrExtensionNodeGeneric; + /// MPT leaf or extension node gadget -pub struct MPTLeafOrExtensionNode; +pub struct MPTLeafOrExtensionNodeGeneric; -impl MPTLeafOrExtensionNode { +impl MPTLeafOrExtensionNodeGeneric { /// Build the MPT node and advance the current key. 
pub fn build_and_advance_key< F: RichField + Extendable, @@ -53,8 +62,8 @@ impl MPTLeafOrExtensionNode { const VALUE_LEN: usize, >( b: &mut CircuitBuilder, - current_key: &MPTKeyWire, - ) -> MPTLeafOrExtensionWires + current_key: &MPTKeyWireGeneric, + ) -> MPTLeafOrExtensionWiresGeneric where [(); PAD_LEN(NODE_LEN)]:, { @@ -70,15 +79,16 @@ impl MPTLeafOrExtensionNode { // Advance the key and extract the value (only decode two headers in the case of leaf). let rlp_headers = decode_fixed_list::<_, D, 2>(b, &node.arr.arr, zero); - let (key, value, valid) = MPTCircuit::<1, NODE_LEN>::advance_key_leaf_or_extension::< - F, - D, - 2, - VALUE_LEN, - >(b, &node.arr, current_key, &rlp_headers); + let (key, value, valid) = + advance_key_leaf_or_extension::( + b, + &node.arr, + current_key, + &rlp_headers, + ); b.connect(tru.target, valid.target); - MPTLeafOrExtensionWires { + MPTLeafOrExtensionWiresGeneric { node, root, key, diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 92418d0f7..3c6dd8be4 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -1,3 +1,4 @@ +use crate::rlp::MAX_KEY_NIBBLE_LEN; use crate::serialization::{ deserialize_array, deserialize_long_array, serialize_array, serialize_long_array, }; @@ -8,14 +9,12 @@ use crate::{ compute_size_with_padding, InputData, KeccakCircuit, KeccakWires, OutputHash, HASH_LEN, PACKED_HASH_LEN, }, - rlp::{ - decode_compact_encoding, decode_fixed_list, RlpHeader, RlpList, MAX_ITEMS_IN_LIST, - MAX_KEY_NIBBLE_LEN, - }, + rlp::{decode_compact_encoding, decode_fixed_list, RlpHeader, RlpList, MAX_ITEMS_IN_LIST}, utils::{find_index_subvector, keccak256}, }; use anyhow::{anyhow, Result}; use core::array::from_fn as create_array; + use plonky2::{ field::extension::Extendable, hash::hash_types::RichField, @@ -33,8 +32,14 @@ mod key; mod leaf_or_extension; pub mod utils; -pub use key::{mpt_key_ptr, MPTKeyWire}; -pub use 
leaf_or_extension::{MPTLeafOrExtensionNode, MPTLeafOrExtensionWires}; +pub use key::{ + mpt_key_ptr, receipt_key_ptr, MPTKeyWire, MPTKeyWireGeneric, ReceiptKeyWire, + MAX_TX_KEY_NIBBLE_LEN, +}; +pub use leaf_or_extension::{ + MPTLeafOrExtensionNode, MPTLeafOrExtensionNodeGeneric, MPTLeafOrExtensionWires, + MPTLeafOrExtensionWiresGeneric, +}; /// Number of items in the RLP encoded list in a leaf node. const NB_ITEMS_LEAF: usize = 2; @@ -44,6 +49,11 @@ const NB_ITEMS_LEAF: usize = 2; /// Given we target MPT storage proof, the value is 32 bytes + 1 byte for RLP encoding. pub const MAX_LEAF_VALUE_LEN: usize = 33; +/// This is the maximum size we allow for the value of Receipt Trie leaf +/// currently set to be the same as we allow for a branch node in the Storage Trie +/// minus the length of the key header and key +pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 526; + /// RLP item size for the extension node pub const MPT_EXTENSION_RLP_SIZE: usize = 2; @@ -56,6 +66,17 @@ pub const MPT_BRANCH_RLP_SIZE: usize = 17; pub const fn PAD_LEN(d: usize) -> usize { compute_size_with_padding(d) } + +/// const function to allow arrays of half a generics size without additional generics +#[allow(non_snake_case)] +pub const fn NIBBLES_TO_BYTES(d: usize) -> usize { + d >> 1 +} + +/// We export a type here to keep it consistent with the already established codebase. +pub type MPTCircuit = + Circuit; + /// Circuit that simoply proves the inclusion of a value inside a MPT tree. /// /// . DEPTH is the maximal depth of the tree. If the tree is smaller, the circuit @@ -65,23 +86,29 @@ pub const fn PAD_LEN(d: usize) -> usize { /// branch node can be up to 32 * 17 = 544 bytes. /// - Note since it uses keccak, the array being hashed is larger because /// keccak requires padding. 
+/// KEY_LEN is the maximum length of the MPT key (differs between storage tries and transaction/receipt tries) #[derive(Clone, Debug)] -pub struct Circuit { +pub struct Circuit< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + const KEY_LEN_BYTES: usize = { NIBBLES_TO_BYTES(KEY_LEN) }, +> { /// for ease of usage, we take vector here and the circuit is doing the padding nodes: Vec>, /// the full key that we are trying to prove in this trie /// NOTE: the key is in bytes. This code will transform it into nibbles /// before passing it to circuit, i.e. the circuit takes the key in nibbles /// whose length == MAX_KEY_NIBBLE_LEN - key: [u8; MAX_KEY_NIBBLE_LEN / 2], + key: [u8; KEY_LEN_BYTES], } #[derive(Serialize, Deserialize, Clone, Debug)] -pub struct InputWires +pub struct InputWires where [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, { - pub(crate) key: MPTKeyWire, + pub(crate) key: MPTKeyWireGeneric, /// a vector of buffers whose size is the padded size of the maximum node length /// the padding may occur anywhere in the array but it can fit the maximum node size /// NOTE: this makes the code a bit harder grasp at first, but it's a straight @@ -122,27 +149,28 @@ where pub root: OutputHash, } -impl Circuit +impl + Circuit where [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, { - pub fn new(key: [u8; MAX_KEY_NIBBLE_LEN / 2], proof: Vec>) -> Self { + pub fn new(key: [u8; NIBBLES_TO_BYTES(KEY_LEN)], proof: Vec>) -> Self { Self { nodes: proof, key } } pub fn create_input_wires( b: &mut CircuitBuilder, - key: Option, // Could set the full key from outside - ) -> InputWires + key: Option>, // Could set the full key from outside + ) -> InputWires where F: RichField + Extendable, { // full key is expected to be given by verifier (done in UserCircuit impl) // initial key has the pointer that is set at the maximum length - 1 (it's an index, so 0-based) - let key = key.unwrap_or_else(|| MPTKeyWire { - key: Array::::new(b), - pointer: 
b.constant(F::from_canonical_usize(MAX_KEY_NIBBLE_LEN) - F::ONE), + let key = key.unwrap_or_else(|| MPTKeyWireGeneric:: { + key: Array::::new(b), + pointer: b.constant(F::from_canonical_usize(KEY_LEN) - F::ONE), }); let should_process: [BoolTarget; DEPTH - 1] = create_array(|_| b.add_virtual_bool_target_safe()); @@ -162,7 +190,7 @@ where /// to be done by the caller. pub fn verify_mpt_proof( b: &mut CircuitBuilder, - inputs: &InputWires, + inputs: &InputWires, ) -> OutputWires where F: RichField + Extendable, @@ -177,12 +205,8 @@ where // small optimization here as we only need to decode two items for a leaf, since we know it's a leaf let leaf_headers = decode_fixed_list::<_, _, NB_ITEMS_LEAF>(b, &inputs.nodes[0].arr.arr, zero); - let (mut iterative_key, leaf_value, is_leaf) = Self::advance_key_leaf_or_extension( - b, - &inputs.nodes[0].arr, - &inputs.key, - &leaf_headers, - ); + let (mut iterative_key, leaf_value, is_leaf) = + advance_key_leaf_or_extension(b, &inputs.nodes[0].arr, &inputs.key, &leaf_headers); b.connect(t.target, is_leaf.target); let mut last_hash_output = leaf_hash.output_array.clone(); let mut keccak_wires = vec![leaf_hash]; @@ -239,7 +263,7 @@ where pub fn assign_wires, const D: usize>( &self, p: &mut PartialWitness, - inputs: &InputWires, + inputs: &InputWires, outputs: &OutputWires, ) -> Result<()> { let pad_len = DEPTH.checked_sub(self.nodes.len()).ok_or(anyhow!( @@ -302,8 +326,12 @@ where pub fn advance_key, const D: usize>( b: &mut CircuitBuilder, node: &Array, - key: &MPTKeyWire, - ) -> (MPTKeyWire, Array, BoolTarget) { + key: &MPTKeyWireGeneric, + ) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, + ) { let zero = b.zero(); // It will try to decode a RLP list of the maximum number of items there can be // in a list, which is 16 for a branch node (Excluding value). @@ -313,9 +341,9 @@ where // if it's more ==> node's a branch node // RLP ( RLP(hash1), RLP(hash2), ... 
RLP(hash16), RLP(value)) let rlp_headers = decode_fixed_list::(b, &node.arr, zero); - let leaf_info = Self::advance_key_leaf_or_extension(b, node, key, &rlp_headers); + let leaf_info = advance_key_leaf_or_extension(b, node, key, &rlp_headers); let tuple_condition = leaf_info.2; - let branch_info = Self::advance_key_branch(b, node, key, &rlp_headers); + let branch_info = advance_key_branch(b, node, key, &rlp_headers); // ensures it's either a branch or leaf/extension let tuple_or_branch = b.or(leaf_info.2, branch_info.2); @@ -327,78 +355,94 @@ where (new_key, child_hash, tuple_or_branch) } +} - /// This function advances the pointer of the MPT key. The parameters are: - /// * The key where to lookup the next nibble and thus the hash stored at - /// nibble position in the branch node. - /// * RLP headers of the current node. - /// And it returns: - /// * New key with the pointer moved. - /// * The child hash / value of the node. - /// * A boolean that must be true if the given node is a leaf or an extension. - /// * The nibble position before this advance. - pub fn advance_key_branch, const D: usize>( - b: &mut CircuitBuilder, - node: &Array, - key: &MPTKeyWire, - rlp_headers: &RlpList, - ) -> (MPTKeyWire, Array, BoolTarget, Target) { - let one = b.one(); - // assume it's a node and return the boolean condition that must be true if - // it is a node - decided in advance_key function - let seventeen = b.constant(F::from_canonical_usize(MAX_ITEMS_IN_LIST)); - let branch_condition = b.is_equal(seventeen, rlp_headers.num_fields); - - // Given we are reading the nibble from the key itself, we don't need to do - // any more checks on it. 
The key and pointer will be given by the verifier so - // attacker can't indicate a different nibble - let nibble = key.current_nibble(b); - - // we advance the pointer for the next iteration - let new_key = key.advance_by(b, one); - let nibble_header = rlp_headers.select(b, nibble); - let branch_child_hash = node.extract_array::(b, nibble_header.offset); - (new_key, branch_child_hash, branch_condition, nibble) - } +/// This function advances the pointer of the MPT key. The parameters are: +/// * The key where to lookup the next nibble and thus the hash stored at +/// nibble position in the branch node. +/// * RLP headers of the current node. +/// And it returns: +/// * New key with the pointer moved. +/// * The child hash / value of the node. +/// * A boolean that must be true if the given node is a leaf or an extension. +/// * The nibble position before this advance. +pub fn advance_key_branch< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &Array, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList, +) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, + Target, +) { + let one = b.one(); + // assume it's a node and return the boolean condition that must be true if + // it is a node - decided in advance_key function + let seventeen = b.constant(F::from_canonical_usize(MAX_ITEMS_IN_LIST)); + let branch_condition = b.is_equal(seventeen, rlp_headers.num_fields); - /// Returns the key with the pointer moved, returns the child hash / value of the node, - /// and returns booleans that must be true IF the given node is a leaf or an extension. 
- pub fn advance_key_leaf_or_extension< - F: RichField + Extendable, - const D: usize, - const LIST_LEN: usize, - // in case of a leaf, the value can be up to 33 bytes because of additional RLP encoding - // in case of extension, the value is 32 bytes - const VALUE_LEN: usize, - >( - b: &mut CircuitBuilder, - node: &Array, - key: &MPTKeyWire, - rlp_headers: &RlpList, - ) -> (MPTKeyWire, Array, BoolTarget) { - let two = b.two(); - let condition = b.is_equal(rlp_headers.num_fields, two); - let key_header = RlpHeader { - data_type: rlp_headers.data_type[0], - offset: rlp_headers.offset[0], - len: rlp_headers.len[0], - }; - let (extracted_key, should_true) = decode_compact_encoding(b, node, &key_header); - // it's either the _value_ of the leaf, OR the _hash_ of the child node if node = ext. - let leaf_child_hash = node.extract_array::(b, rlp_headers.offset[1]); - // note we are going _backwards_ on the key, so we need to substract the expected key length - // we want to check against - let new_key = key.advance_by(b, extracted_key.real_len); - // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key - // in this case. Indeed, in leaf/ext. there is only one key possible. Since we decoded it - // from the beginning of the node, and that the hash of the node also starts at the beginning, - // either the attacker give the right node or it gives an invalid node and hashes will not - // match. - let condition = b.and(condition, should_true); - (new_key, leaf_child_hash, condition) - } + // Given we are reading the nibble from the key itself, we don't need to do + // any more checks on it. 
The key and pointer will be given by the verifier so + // attacker can't indicate a different nibble + let nibble = key.current_nibble(b); + + // we advance the pointer for the next iteration + let new_key = key.advance_by(b, one); + let nibble_header = rlp_headers.select(b, nibble); + let branch_child_hash = node.extract_array::(b, nibble_header.offset); + (new_key, branch_child_hash, branch_condition, nibble) } +/// Returns the key with the pointer moved, returns the child hash / value of the node, +/// and returns booleans that must be true IF the given node is a leaf or an extension. +pub fn advance_key_leaf_or_extension< + F: RichField + Extendable, + const D: usize, + const LIST_LEN: usize, + // in case of a leaf, the value can be up to 33 bytes because of additional RLP encoding + // in case of extension, the value is 32 bytes + const VALUE_LEN: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &Array, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList, +) -> ( + MPTKeyWireGeneric, + Array, + BoolTarget, +) { + let two = b.two(); + let condition = b.is_equal(rlp_headers.num_fields, two); + let key_header = RlpHeader { + data_type: rlp_headers.data_type[0], + offset: rlp_headers.offset[0], + len: rlp_headers.len[0], + }; + let (extracted_key, should_true) = + decode_compact_encoding::<_, _, _, KEY_LEN>(b, node, &key_header); + // it's either the _value_ of the leaf, OR the _hash_ of the child node if node = ext. + let leaf_child_hash = node.extract_array::(b, rlp_headers.offset[1]); + // note we are going _backwards_ on the key, so we need to substract the expected key length + // we want to check against + let new_key = key.advance_by(b, extracted_key.real_len); + // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key + // in this case. Indeed, in leaf/ext. there is only one key possible. 
Since we decoded it + // from the beginning of the node, and that the hash of the node also starts at the beginning, + // either the attacker give the right node or it gives an invalid node and hashes will not + // match. + let condition = b.and(condition, should_true); + (new_key, leaf_child_hash, condition) +} #[cfg(test)] mod test { use std::array::from_fn as create_array; @@ -428,31 +472,43 @@ mod test { use plonky2_crypto::u32::arithmetic_u32::U32Target; use rand::{thread_rng, RngCore}; - use crate::keccak::{HASH_LEN, PACKED_HASH_LEN}; - use crate::rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}; use crate::utils::{Endianness, PackerTarget}; use crate::{ array::Array, utils::{find_index_subvector, keccak256}, }; use crate::{eth::ProofQuery, C, D, F}; + use crate::{ + keccak::{HASH_LEN, PACKED_HASH_LEN}, + mpt_sequential::advance_key_leaf_or_extension, + }; + use crate::{ + mpt_sequential::advance_key_branch, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, + }; use super::{ utils::{bytes_to_nibbles, nibbles_to_bytes, visit_node, visit_proof}, - Circuit, InputWires, MPTKeyWire, OutputWires, MAX_LEAF_VALUE_LEN, NB_ITEMS_LEAF, PAD_LEN, + Circuit, InputWires, MPTKeyWire, OutputWires, MAX_LEAF_VALUE_LEN, NB_ITEMS_LEAF, + NIBBLES_TO_BYTES, PAD_LEN, }; #[derive(Clone, Debug)] - struct TestCircuit { - c: Circuit, + struct TestCircuit< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + const KEY_LEN_BYTES: usize = { NIBBLES_TO_BYTES(KEY_LEN) }, + > { + c: Circuit, exp_root: [u8; 32], exp_value: [u8; MAX_LEAF_VALUE_LEN], // The flag identifies if need to check the expected leaf value, it's // set to true for storage proof, and false for state proof (unconcern). 
checking_value: bool, } - impl UserCircuit - for TestCircuit + impl + UserCircuit for TestCircuit where F: RichField + Extendable, [(); PAD_LEN(NODE_LEN)]:, @@ -461,7 +517,7 @@ mod test { [(); HASH_LEN / 4]:, { type Wires = ( - InputWires, + InputWires, OutputWires, Array, // root Array, // value @@ -531,12 +587,16 @@ mod test { // Written as constant from ^ const DEPTH: usize = 2; const NODE_LEN: usize = 150; - verify_storage_proof_from_query::(&query, &res)?; + verify_storage_proof_from_query::(&query, &res)?; verify_state_proof_from_query(&query, &res) } /// Verify the storage proof from query result. - pub(crate) fn verify_storage_proof_from_query( + pub(crate) fn verify_storage_proof_from_query< + const DEPTH: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, + >( query: &ProofQuery, res: &EIP1186AccountProofResponse, ) -> Result<()> @@ -544,6 +604,7 @@ mod test { [(); PAD_LEN(NODE_LEN)]:, [(); DEPTH - 1]:, [(); PAD_LEN(NODE_LEN) / 4]:, + [(); NIBBLES_TO_BYTES(KEY_LEN)]:, { ProofQuery::verify_storage_proof(res)?; @@ -568,8 +629,8 @@ mod test { let u8idx = find_index_subvector(&mpt_proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), + let circuit = TestCircuit:: { + c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), exp_root: root.try_into().unwrap(), exp_value: encoded_value.try_into().unwrap(), checking_value: false, @@ -608,8 +669,11 @@ mod test { let u8idx = find_index_subvector(&mpt_proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(mpt_key.try_into().unwrap(), mpt_proof), + let circuit = TestCircuit:: { + c: Circuit::::new( + mpt_key.try_into().unwrap(), + mpt_proof, + ), exp_root: root.try_into().unwrap(), exp_value: [0; MAX_LEAF_VALUE_LEN], // the reason we don't check the value is the circuit is made for storage proof and it extracts a 32bytes @@ -665,8 +729,8 @@ mod test { let u8idx = 
find_index_subvector(&proof[i], &child_hash); assert!(u8idx.is_some()); } - let circuit = TestCircuit:: { - c: Circuit::::new(key.try_into().unwrap(), proof), + let circuit = TestCircuit:: { + c: Circuit::::new(key.try_into().unwrap(), proof), exp_root: root, // simply pad it to max size exp_value: create_array(|i| if i < VALUE_LEN { value[i] } else { 0 }), @@ -753,7 +817,9 @@ mod test { let node = Array::::new(&mut b); let key_wire = MPTKeyWire::new(&mut b); let (advanced_key, value, valid_node) = - Circuit::::advance_key(&mut b, &node, &key_wire); + Circuit::::advance_key( + &mut b, &node, &key_wire, + ); b.connect(tr.target, valid_node.target); let exp_key_ptr = b.add_virtual_target(); b.connect(advanced_key.pointer, exp_key_ptr); @@ -864,12 +930,13 @@ mod test { let key_wire = MPTKeyWire::new(&mut builder); let rlp_headers = decode_fixed_list::(&mut builder, &node.arr, zero); - let (advanced_key, value, should_true, _) = Circuit::::advance_key_branch( - &mut builder, - &node, - &key_wire, - &rlp_headers, - ); + let (advanced_key, value, should_true, _) = + advance_key_branch::<_, _, NODE_LEN, MAX_KEY_NIBBLE_LEN>( + &mut builder, + &node, + &key_wire, + &rlp_headers, + ); builder.connect(tt.target, should_true.target); let exp_key_ptr = builder.add_virtual_target(); builder.connect(advanced_key.pointer, exp_key_ptr); @@ -935,7 +1002,7 @@ mod test { let key_wire = MPTKeyWire::new(&mut builder); let rlp_headers = decode_fixed_list::(&mut builder, &node.arr, zero); let (advanced_key, value, should_true) = - Circuit::::advance_key_leaf_or_extension( + advance_key_leaf_or_extension::<_, _, _, _, NODE_LEN, MAX_KEY_NIBBLE_LEN>( &mut builder, &node, &key_wire, diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index 741f9e38e..3c50eb8cc 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -58,11 +58,16 @@ impl RlpList { } } } -pub fn decode_compact_encoding, const D: usize, const N: usize>( +pub fn decode_compact_encoding< + F: RichField + 
Extendable, + const D: usize, + const N: usize, + const KEY_LEN: usize, +>( b: &mut CircuitBuilder, input: &Array, key_header: &RlpHeader, -) -> (VectorWire, BoolTarget) { +) -> (VectorWire, BoolTarget) { let zero = b.zero(); let two = b.two(); let first_byte = input.value_at(b, key_header.offset); @@ -71,7 +76,7 @@ pub fn decode_compact_encoding, const D: usize, con let mut prev_nibbles = (least_bits, most_bits); let mut cur_nibbles: (Target, Target); - let mut nibbles: [Target; MAX_KEY_NIBBLE_LEN] = [b.zero(); MAX_KEY_NIBBLE_LEN]; + let mut nibbles: [Target; KEY_LEN] = [b.zero(); KEY_LEN]; let first_nibble = prev_nibbles.0; let first_nibble_as_bits = num_to_bits(b, 4, first_nibble); @@ -92,7 +97,10 @@ pub fn decode_compact_encoding, const D: usize, con // during the first iteration of this loop. let one = b.one(); let mut i_offset = key_header.offset; - for i in 0..MAX_ENC_KEY_LEN - 1 { + + // We calculate how many times to run the foor loop, this is only depends on + // KEY_LEN, since we skip one byte it is just KEY_LEN / 2. 
+ for i in 0..KEY_LEN / 2 { i_offset = b.add(i_offset, one); // look now at the encoded path let x = input.value_at(b, i_offset); @@ -355,7 +363,7 @@ mod tests { use crate::array::Array; use crate::rlp::{ decode_compact_encoding, decode_fixed_list, decode_header, RlpHeader, MAX_ENC_KEY_LEN, - MAX_LEN_BYTES, + MAX_KEY_NIBBLE_LEN, MAX_LEN_BYTES, }; use crate::utils::{keccak256, less_than_or_equal_to, IntTargetWriter}; use crate::{C, D, F}; @@ -792,7 +800,11 @@ mod tests { len: builder.constant(F::from_canonical_usize(tc.key_len)), data_type: builder.constant(F::from_canonical_usize(0)), }; - let (nibbles, cond) = decode_compact_encoding(&mut builder, &wire1, &key_header); + let (nibbles, cond) = decode_compact_encoding::<_, _, _, MAX_KEY_NIBBLE_LEN>( + &mut builder, + &wire1, + &key_header, + ); builder.assert_bool(cond); let exp_nib_len = builder.constant(F::from_canonical_usize(tc.expected.len())); builder.connect(nibbles.real_len, exp_nib_len); diff --git a/mp2-test/Cargo.toml b/mp2-test/Cargo.toml index e4fd7ddbb..a2341668d 100644 --- a/mp2-test/Cargo.toml +++ b/mp2-test/Cargo.toml @@ -13,6 +13,7 @@ plonky2.workspace = true plonky2_ecgfp5.workspace = true rand.workspace = true serde.workspace = true +tokio.workspace = true mp2_common = { path = "../mp2-common" } recursion_framework = { path = "../recursion-framework" } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 97a64dfb2..d1e79caa1 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -1,6 +1,17 @@ +use alloy::{ + eips::BlockNumberOrTag, + node_bindings::Anvil, + primitives::U256, + providers::{ext::AnvilApi, Provider, ProviderBuilder, RootProvider, WalletProvider}, + rpc::types::Transaction, + sol, +}; use eth_trie::{EthTrie, MemoryDB, Trie}; + +use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; use rand::{thread_rng, Rng}; use std::sync::Arc; +use tokio::task::JoinSet; /// Simply the maximum number of nibbles a key can have. 
const MAX_KEY_NIBBLE_LEN: usize = 64; @@ -39,3 +50,144 @@ pub fn generate_random_storage_mpt( } (trie, keys[right_key_idx].to_vec()) } + +/// This function is used so that we can generate a Receipt Trie for a blog with varying transactions +/// (i.e. some we are interested in and some we are not). +fn generate_receipt_proofs() -> Vec { + // Make a contract that emits events so we can pick up on them + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780638381f58a14610058578063d09de08a14610076578063db73227914610080575b5f80fd5b61005661008a565b005b6100606100f8565b60405161006d9190610165565b60405180910390f35b61007e6100fd565b005b610088610115565b005b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100c06100fd565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a26100f66100fd565b565b5f5481565b5f8081548092919061010e906101ab565b9190505550565b5f547fdcd9c7fa0342f01013bd0bf2bec103a81936162dcebd1f0c38b1d4164c17e0fc60405160405180910390a261014b6100fd565b565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212202787ca0f2ea71e118bc4d1bf239cde5ec4730aeb35a404c44e6c9d587316418564736f6c634300081a0033")] + contract EventEmitter { + uint256 public number; + event testEvent(uint256 indexed num); + + function testEmit() public { + emit testEvent(number); + increment(); + } + + function twoEmits() public { + emit testEvent(number); + increment(); + emit testEvent(number); + increment(); + } + + function 
increment() public { + number++; + } + } + } + + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] + + contract OtherEmitter { + uint256 public number; + event otherEvent(uint256 indexed num); + + function otherEmit() public { + emit otherEvent(number); + increment(); + } + + function twoEmits() public { + emit otherEvent(number); + increment(); + emit otherEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async { + // Spin up a local node. 
+ + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_wallet_and_config(|a| Anvil::block_time(a, 1)); + + // Deploy the contract using anvil + let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); + + // Deploy the contract using anvil + let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); + + let address = rpc.default_signer_address(); + rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let tx_reqs = (0..25) + .map(|i| match i % 4 { + 0 => event_contract + .testEmit() + .into_transaction_request() + .nonce(i as u64), + 1 => event_contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + 2 => other_contract + .otherEmit() + .into_transaction_request() + .nonce(i as u64), + 3 => other_contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + _ => unreachable!(), + }) + .collect::>(); + let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { + let rpc_clone = rpc.clone(); + join_set.spawn(async move { + rpc_clone + .send_transaction(tx_req) + .await + .unwrap() + .watch() + .await + .unwrap() + }); + }); + + let hashes = join_set.join_all().await; + let mut transactions = Vec::new(); + for hash in hashes.into_iter() { + transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); + } + + let block_number = transactions.first().unwrap().block_number.unwrap(); + + // We want to get the event signature so we can make a ReceiptQuery + let all_events = EventEmitter::abi::events(); + + let events = all_events.get("testEvent").unwrap(); + let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); + + receipt_query + .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .await + .unwrap() + }) +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn tester() { + let receipt_proofs = generate_receipt_proofs(); + for proof in receipt_proofs.iter() { + println!("proof: {}", 
proof.tx_index); + } + } +} diff --git a/mp2-v1/src/contract_extraction/branch.rs b/mp2-v1/src/contract_extraction/branch.rs index ff27d2147..b78e7edfa 100644 --- a/mp2-v1/src/contract_extraction/branch.rs +++ b/mp2-v1/src/contract_extraction/branch.rs @@ -5,7 +5,7 @@ use anyhow::Result; use mp2_common::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires, PACKED_HASH_LEN}, - mpt_sequential::{Circuit as MPTCircuit, PAD_LEN}, + mpt_sequential::{advance_key_branch, PAD_LEN}, public_inputs::PublicInputCommon, rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, types::{CBuilder, GFp}, @@ -54,12 +54,14 @@ where // validity of the hash exposed by the proofs. let headers = decode_fixed_list::<_, D, MAX_ITEMS_IN_LIST>(b, &node.arr.arr, zero); - let (new_mpt_key, hash, is_valid, _) = MPTCircuit::<1, NODE_LEN>::advance_key_branch( - b, - &node.arr, - &child_proof.mpt_key(), - &headers, - ); + let (new_mpt_key, hash, is_valid, _) = + // MPTCircuit::<1, NODE_LEN, MAX_KEY_NIBBLE_LEN> + advance_key_branch( + b, + &node.arr, + &child_proof.mpt_key(), + &headers, + ); // We always enforce it's a branch node, i.e. that it has 17 entries. 
b.connect(is_valid.target, ttrue.target); @@ -111,7 +113,7 @@ where _builder_parameters: Self::CircuitBuilderParams, ) -> Self { let inputs = PublicInputs::from_slice(&verified_proofs[0].public_inputs); - BranchCircuit::build(builder, inputs) + BranchCircuit::<_>::build(builder, inputs) } fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> Result<()> { diff --git a/mp2-v1/src/length_extraction/branch.rs b/mp2-v1/src/length_extraction/branch.rs index 157f0b590..680ecdcba 100644 --- a/mp2-v1/src/length_extraction/branch.rs +++ b/mp2-v1/src/length_extraction/branch.rs @@ -5,9 +5,9 @@ use core::array; use mp2_common::{ array::{Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires, PACKED_HASH_LEN}, - mpt_sequential::Circuit as MPTCircuit, + mpt_sequential::advance_key_branch, public_inputs::PublicInputCommon, - rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, types::{CBuilder, GFp}, utils::{Endianness, PackerTarget}, D, @@ -79,7 +79,9 @@ impl BranchLengthCircuit { let key = child_proof.mpt_key_wire(); let (key, hash, is_branch, _) = - MPTCircuit::<1, MAX_BRANCH_NODE_LEN>::advance_key_branch(cb, &node.arr, &key, &headers); + advance_key_branch::<_, D, MAX_BRANCH_NODE_LEN, MAX_KEY_NIBBLE_LEN>( + cb, &node.arr, &key, &headers, + ); // asserts this is a branch node cb.assert_one(is_branch.target); diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 3e9cb8414..2c3b0bc95 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -25,4 +25,5 @@ pub mod final_extraction; pub mod indexing; pub mod length_extraction; pub mod query; +pub mod receipt_extraction; pub mod values_extraction; diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs new file mode 100644 index 000000000..f7c99d8a7 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -0,0 +1,510 @@ +//! 
Module handling the leaf node inside a Receipt Trie + +use super::public_inputs::PublicInputArgs; + +use mp2_common::{ + array::{Array, Vector, VectorWire}, + eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, + group_hashing::CircuitBuilderGroupHashing, + keccak::{InputData, KeccakCircuit, KeccakWires}, + mpt_sequential::{ + MPTLeafOrExtensionNodeGeneric, ReceiptKeyWire, MAX_RECEIPT_LEAF_VALUE_LEN, + MAX_TX_KEY_NIBBLE_LEN, PAD_LEN, + }, + poseidon::H, + public_inputs::PublicInputCommon, + types::{CBuilder, GFp}, + utils::{Endianness, PackerTarget}, + D, F, +}; +use plonky2::{ + field::types::Field, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, +}; + +use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; + +use rlp::Encodable; +use serde::{Deserialize, Serialize}; + +/// Maximum number of logs per transaction we can process +const MAX_LOGS_PER_TX: usize = 2; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct ReceiptLeafWires +where + [(); PAD_LEN(NODE_LEN)]:, +{ + /// The event we are monitoring for + pub event: EventWires, + /// The node bytes + pub node: VectorWire, + /// The actual value stored in the node + pub value: Array, + /// the hash of the node bytes + pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// The offset of the status of the transaction in the RLP encoded receipt node. 
+ pub status_offset: Target, + /// The offsets of the relevant logs inside the node + pub relevant_logs_offset: VectorWire, + /// The key in the MPT Trie + pub mpt_key: ReceiptKeyWire, +} + +/// Contains all the information for an [`Event`] in rlp form +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct EventWires { + /// Size in bytes of the whole event + size: Target, + /// Packed contract address to check + address: Array, + /// Byte offset for the address from the beginning of a Log + add_rel_offset: Target, + /// Packed event signature, + event_signature: Array, + /// Byte offset from the start of the log to event signature + sig_rel_offset: Target, + /// The topics for this Log + topics: [LogColumn; 3], + /// The extra data stored by this Log + data: [LogColumn; 2], +} + +/// Contains all the information for a [`Log`] in rlp form +#[derive(Debug, Clone, Serialize, Deserialize, Copy, PartialEq, Eq)] +pub struct LogColumn { + column_id: Target, + /// The byte offset from the beggining of the log to this target + rel_byte_offset: Target, + /// The length of this topic/data + len: Target, +} + +impl LogColumn { + /// Convert to an array for metadata digest + pub fn to_array(&self) -> [Target; 3] { + [self.column_id, self.rel_byte_offset, self.len] + } + + /// Assigns a log colum from a [`LogDataInfo`] + pub fn assign(&self, pw: &mut PartialWitness, data: LogDataInfo) { + pw.set_target(self.column_id, F::from_canonical_usize(data.column_id)); + pw.set_target( + self.rel_byte_offset, + F::from_canonical_usize(data.rel_byte_offset), + ); + pw.set_target(self.len, F::from_canonical_usize(data.len)); + } +} + +impl EventWires { + /// Convert to an array for metadata digest + pub fn to_slice(&self) -> [Target; 70] { + let topics_flat = self + .topics + .iter() + .flat_map(|t| t.to_array()) + .collect::>(); + let data_flat = self + .data + .iter() + .flat_map(|t| t.to_array()) + .collect::>(); + let mut out = [Target::default(); 70]; + out[0] = 
self.size; + out.iter_mut() + .skip(1) + .take(20) + .enumerate() + .for_each(|(i, entry)| *entry = self.address.arr[i]); + out[21] = self.add_rel_offset; + out.iter_mut() + .skip(22) + .take(32) + .enumerate() + .for_each(|(i, entry)| *entry = self.event_signature.arr[i]); + out[54] = self.sig_rel_offset; + out.iter_mut() + .skip(55) + .take(9) + .enumerate() + .for_each(|(i, entry)| *entry = topics_flat[i]); + out.iter_mut() + .skip(64) + .take(6) + .enumerate() + .for_each(|(i, entry)| *entry = data_flat[i]); + out + } + + pub fn verify_logs_and_extract_values( + &self, + b: &mut CBuilder, + value: &Array, + status_offset: Target, + relevant_logs_offsets: &VectorWire, + ) -> CurveTarget { + let t = b._true(); + let zero = b.zero(); + let curve_zero = b.curve_zero(); + let mut value_digest = b.curve_zero(); + + // Enforce status is true. + let status = value.random_access_large_array(b, status_offset); + b.connect(status, t.target); + + for log_offset in relevant_logs_offsets.arr.arr { + // Extract the address bytes + let address_start = b.add(log_offset, self.add_rel_offset); + + let address_bytes = value.extract_array_large::<_, _, 20>(b, address_start); + + let address_check = address_bytes.equals(b, &self.address); + // Extract the signature bytes + let sig_start = b.add(log_offset, self.sig_rel_offset); + + let sig_bytes = value.extract_array_large::<_, _, 32>(b, sig_start); + + let sig_check = sig_bytes.equals(b, &self.event_signature); + + // We check to see if the relevant log offset is zero (this indicates a dummy value) + let dummy = b.is_equal(log_offset, zero); + + let address_to_enforce = b.select(dummy, t.target, address_check.target); + let sig_to_enforce = b.select(dummy, t.target, sig_check.target); + + b.connect(t.target, address_to_enforce); + b.connect(t.target, sig_to_enforce); + + for &log_column in self.topics.iter().chain(self.data.iter()) { + let data_start = b.add(log_offset, log_column.rel_byte_offset); + // The data is always 32 bytes 
long + let data_bytes = value.extract_array_large::<_, _, 32>(b, data_start); + + // Pack the data and get the digest + let packed_data = data_bytes.arr.pack(b, Endianness::Big); + let data_digest = b.map_to_curve_point( + &std::iter::once(log_column.column_id) + .chain(packed_data) + .collect::>(), + ); + + // For each column we use the `column_id` field to tell if its a dummy or not, zero indicates a dummy. + let dummy_column = b.is_equal(log_column.column_id, zero); + let selector = b.and(dummy_column, dummy); + + let selected_point = b.select_curve_point(selector, curve_zero, data_digest); + value_digest = b.add_curve_point(&[selected_point, value_digest]); + } + } + + value_digest + } +} + +/// Circuit to prove the correct derivation of the MPT key from a simple slot +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReceiptLeafCircuit { + pub(crate) info: ReceiptProofInfo, +} + +impl ReceiptLeafCircuit +where + [(); PAD_LEN(NODE_LEN)]:, +{ + pub fn build_leaf_wires(b: &mut CBuilder) -> ReceiptLeafWires { + // Build the event wires + let event_wires = Self::build_event_wires(b); + + // Add targets for the data specific to this receipt + let index = b.add_virtual_target(); + let status_offset = b.add_virtual_target(); + let relevant_logs_offset = VectorWire::::new(b); + + let mpt_key = ReceiptKeyWire::new(b); + + // Build the node wires. 
+ let wires = MPTLeafOrExtensionNodeGeneric::build_and_advance_key::< + _, + D, + NODE_LEN, + MAX_RECEIPT_LEAF_VALUE_LEN, + >(b, &mpt_key); + let node = wires.node; + let root = wires.root; + + // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for + let receipt_body = wires.value; + let mut dv = event_wires.verify_logs_and_extract_values( + b, + &receipt_body, + status_offset, + &relevant_logs_offset, + ); + let value_id = b.map_to_curve_point(&[index]); + dv = b.add_curve_point(&[value_id, dv]); + + let dm = b.hash_n_to_hash_no_pad::(event_wires.to_slice().to_vec()); + + // Register the public inputs + PublicInputArgs { + h: &root.output_array, + k: &wires.key, + dv, + dm, + } + .register_args(b); + + ReceiptLeafWires { + event: event_wires, + node, + value: receipt_body, + root, + status_offset, + relevant_logs_offset, + mpt_key, + } + } + + fn build_event_wires(b: &mut CBuilder) -> EventWires { + let size = b.add_virtual_target(); + + // Packed address + let arr = [b.add_virtual_target(); 20]; + let address = Array::from_array(arr); + + // relative offset of the address + let add_rel_offset = b.add_virtual_target(); + + // Event signature + let arr = [b.add_virtual_target(); 32]; + let event_signature = Array::from_array(arr); + + // Signature relative offset + let sig_rel_offset = b.add_virtual_target(); + + // topics + let topics = [Self::build_log_column(b); 3]; + + // data + let data = [Self::build_log_column(b); 2]; + + EventWires { + size, + address, + add_rel_offset, + event_signature, + sig_rel_offset, + topics, + data, + } + } + + fn build_log_column(b: &mut CBuilder) -> LogColumn { + let column_id = b.add_virtual_target(); + let rel_byte_offset = b.add_virtual_target(); + let len = b.add_virtual_target(); + + LogColumn { + column_id, + rel_byte_offset, + len, + } + } + + pub fn assign(&self, pw: &mut PartialWitness, wires: &ReceiptLeafWires) { + self.assign_event_wires(pw, &wires.event); + + let 
node = self + .info + .mpt_proof + .last() + .expect("Receipt MPT proof had no nodes"); + let pad_node = + Vector::::from_vec(node).expect("invalid node given"); + wires.node.assign(pw, &pad_node); + KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::assign( + pw, + &wires.root, + &InputData::Assigned(&pad_node), + ); + + pw.set_target( + wires.status_offset, + GFp::from_canonical_usize(self.info.status_offset), + ); + + let relevant_logs_vector = + Vector::::from_vec(&self.info.relevant_logs_offset) + .expect("Could not assign relevant logs offsets"); + wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); + + let key_encoded = self.info.tx_index.rlp_bytes(); + let nibbles = key_encoded + .iter() + .flat_map(|byte| [byte / 16, byte % 16]) + .collect::>(); + + let mut key_nibbles = [0u8; MAX_TX_KEY_NIBBLE_LEN]; + key_nibbles + .iter_mut() + .enumerate() + .for_each(|(index, nibble)| { + if index < nibbles.len() { + *nibble = nibbles[index] + } + }); + + wires.mpt_key.assign(pw, &key_nibbles, self.info.index_size); + } + + pub fn assign_event_wires(&self, pw: &mut PartialWitness, wires: &EventWires) { + let EventLogInfo { + size, + address, + add_rel_offset, + event_signature, + sig_rel_offset, + topics, + data, + } = self.info.event_log_info; + + pw.set_target(wires.size, F::from_canonical_usize(size)); + + wires + .address + .assign(pw, &address.0.map(|byte| GFp::from_canonical_u8(byte))); + + pw.set_target( + wires.add_rel_offset, + F::from_canonical_usize(add_rel_offset), + ); + + wires.event_signature.assign( + pw, + &event_signature.map(|byte| GFp::from_canonical_u8(byte)), + ); + + pw.set_target( + wires.sig_rel_offset, + F::from_canonical_usize(sig_rel_offset), + ); + + wires + .topics + .iter() + .zip(topics.into_iter()) + .for_each(|(topic_wire, topic)| topic_wire.assign(pw, topic)); + wires + .data + .iter() + .zip(data.into_iter()) + .for_each(|(data_wire, data)| data_wire.assign(pw, data)); + } +} + +#[cfg(test)] +mod tests { + use super::*; + 
#[derive(Clone, Debug)] + struct TestReceiptLeafCircuit { + c: ReceiptLeafCircuit, + exp_value: Vec, + } + + impl UserCircuit for TestReceiptLeafCircuit + where + [(); PAD_LEN(NODE_LEN)]:, + { + // Leaf wires + expected extracted value + type Wires = ( + ReceiptLeafWires, + Array, + ); + + fn build(b: &mut CircuitBuilder) -> Self::Wires { + let exp_value = Array::::new(b); + + let leaf_wires = ReceiptLeafCircuit::::build(b); + leaf_wires.value.enforce_equal(b, &exp_value); + + (leaf_wires, exp_value) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.c.assign(pw, &wires.0); + wires + .1 + .assign_bytes(pw, &self.exp_value.clone().try_into().unwrap()); + } + } + #[test] + fn test_leaf_circuit() { + const NODE_LEN: usize = 80; + + let simple_slot = 2_u8; + let slot = StorageSlot::Simple(simple_slot as usize); + let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); + let chain_id = 10; + let id = identifier_single_var_column(simple_slot, &contract_address, chain_id, vec![]); + + let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); + let value = random_vector(MAPPING_LEAF_VALUE_LEN); + let encoded_value: Vec = rlp::encode(&value).to_vec(); + // assert we added one byte of RLP header + assert_eq!(encoded_value.len(), MAPPING_LEAF_VALUE_LEN + 1); + println!("encoded value {:?}", encoded_value); + trie.insert(&slot.mpt_key(), &encoded_value).unwrap(); + trie.root_hash().unwrap(); + + let proof = trie.get_proof(&slot.mpt_key_vec()).unwrap(); + let node = proof.last().unwrap().clone(); + + let c = LeafSingleCircuit:: { + node: node.clone(), + slot: SimpleSlot::new(simple_slot), + id, + }; + let test_circuit = TestLeafSingleCircuit { + c, + exp_value: value.clone(), + }; + + let proof = run_circuit::(test_circuit); + let pi = PublicInputs::new(&proof.public_inputs); + + { + let exp_hash = keccak256(&node).pack(Endianness::Little); + assert_eq!(pi.root_hash(), exp_hash); + } + { + let (key, ptr) = 
pi.mpt_key_info(); + + let exp_key = slot.mpt_key_vec(); + let exp_key: Vec<_> = bytes_to_nibbles(&exp_key) + .into_iter() + .map(F::from_canonical_u8) + .collect(); + assert_eq!(key, exp_key); + + let leaf_key: Vec> = rlp::decode_list(&node); + let nib = Nibbles::from_compact(&leaf_key[0]); + let exp_ptr = F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1 - nib.nibbles().len()); + assert_eq!(exp_ptr, ptr); + } + // Check values digest + { + let exp_digest = compute_leaf_single_values_digest(id, &value); + assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); + } + // Check metadata digest + { + let exp_digest = compute_leaf_single_metadata_digest(id, simple_slot); + assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); + } + assert_eq!(pi.n(), F::ONE); + } +} \ No newline at end of file diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs new file mode 100644 index 000000000..6c3803e08 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -0,0 +1,2 @@ +pub mod leaf; +pub mod public_inputs; diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs new file mode 100644 index 000000000..901fc0b29 --- /dev/null +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -0,0 +1,76 @@ +//! Public inputs for Receipt Extraction circuits + +use mp2_common::{ + keccak::{OutputHash, PACKED_HASH_LEN}, + mpt_sequential::ReceiptKeyWire, + public_inputs::{PublicInputCommon, PublicInputRange}, + types::{CBuilder, CURVE_TARGET_LEN}, +}; +use plonky2::hash::hash_types::{HashOutTarget, NUM_HASH_OUT_ELTS}; + +use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; + +/// The maximum length of a transaction index in a block in nibbles. +/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. 
+const MAX_INDEX_NIBBLES: usize = 6; +// Contract extraction public Inputs: +/// - `H : [8]F` : packed node hash +const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; +/// - `K : [6]F` : Length of the transaction index in nibbles +const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; +/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 6 → 0) +const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; +/// - `DV : Digest[F]` : value digest of all rows to extract +const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; +/// - `DM : Digest[F]` : metadata digest to extract +const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + NUM_HASH_OUT_ELTS; + +/// Public inputs for contract extraction +#[derive(Clone, Debug)] +pub struct PublicInputArgs<'a> { + /// The hash of the node + pub(crate) h: &'a OutputHash, + /// The MPT key + pub(crate) k: &'a ReceiptKeyWire, + /// Digest of the values + pub(crate) dv: CurveTarget, + /// The poseidon hash of the metadata + pub(crate) dm: HashOutTarget, +} + +impl<'a> PublicInputCommon for PublicInputArgs<'a> { + const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; + + fn register_args(&self, cb: &mut CBuilder) { + self.generic_register_args(cb) + } +} + +impl<'a> PublicInputArgs<'a> { + /// Create a new public inputs. 
+ pub fn new( + h: &'a OutputHash, + k: &'a ReceiptKeyWire, + dv: CurveTarget, + dm: HashOutTarget, + ) -> Self { + Self { h, k, dv, dm } + } +} + +impl<'a> PublicInputArgs<'a> { + pub fn generic_register_args(&self, cb: &mut CBuilder) { + self.h.register_as_public_input(cb); + self.k.register_as_input(cb); + cb.register_curve_public_input(self.dv); + cb.register_public_inputs(&self.dm.elements); + } + + pub fn digest_value(&self) -> CurveTarget { + self.dv + } + + pub fn digest_metadata(&self) -> HashOutTarget { + self.dm + } +} diff --git a/mp2-v1/src/values_extraction/branch.rs b/mp2-v1/src/values_extraction/branch.rs index afc352859..cdbf117bc 100644 --- a/mp2-v1/src/values_extraction/branch.rs +++ b/mp2-v1/src/values_extraction/branch.rs @@ -5,9 +5,9 @@ use anyhow::Result; use mp2_common::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires, HASH_LEN, PACKED_HASH_LEN}, - mpt_sequential::{Circuit as MPTCircuit, MPTKeyWire, PAD_LEN}, + mpt_sequential::{advance_key_branch, MPTKeyWire, NIBBLES_TO_BYTES, PAD_LEN}, public_inputs::PublicInputCommon, - rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST}, + rlp::{decode_fixed_list, MAX_ITEMS_IN_LIST, MAX_KEY_NIBBLE_LEN}, serialization::{deserialize, serialize}, types::{CBuilder, GFp}, utils::{less_than, Endianness, PackerTarget}, @@ -60,7 +60,10 @@ where pub fn build( b: &mut CBuilder, inputs: &[PublicInputs; N_CHILDREN], - ) -> BranchWires { + ) -> BranchWires + where + [(); NIBBLES_TO_BYTES(MAX_KEY_NIBBLE_LEN)]:, + { let zero = b.zero(); let one = b.one(); let ttrue = b._true(); @@ -129,7 +132,7 @@ where let child_key = proof_inputs.mpt_key(); let (_, hash, is_valid, nibble) = - MPTCircuit::<1, NODE_LEN>::advance_key_branch(b, &node.arr, &child_key, &headers); + advance_key_branch(b, &node.arr, &child_key, &headers); // We always enforce it's a branch node, i.e. that it has 17 entries. 
b.connect(is_valid.target, ttrue.target); diff --git a/rustc-ice-2024-11-04T12_36_50-74186.txt b/rustc-ice-2024-11-04T12_36_50-74186.txt new file mode 100644 index 000000000..d48781bb7 --- /dev/null +++ b/rustc-ice-2024-11-04T12_36_50-74186.txt @@ -0,0 +1,63 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x11209ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10ff1b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x1120b9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x1120b9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x1120b6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x1120b8f24 - _rust_begin_unwind + 6: 0x1147a7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x1148ddc1c - >::const_param_out_of_range + 8: 0x110de5ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x110db651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x110daa120 - >::super_fold_with::> + 11: 0x110cf9f18 - >::super_fold_with::> + 12: 0x110d70d94 - 
<&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x110cf7c2c - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 14: 0x110cf73b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 15: 0x110df372c - >::try_fold_with::> + 16: 0x110dc5a1c - ::instantiate_into + 17: 0x111cc9848 - ::nominal_obligations + 18: 0x111cc8710 - >::visit_const + 19: 0x111cc7b58 - >::visit_ty + 20: 0x111cc5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 21: 0x111e3b15c - ::process_obligation + 22: 0x111e1c724 - >::process_obligations:: + 23: 0x111e383c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 24: 0x111c66608 - >::assumed_wf_types_and_report_errors + 25: 0x110376c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 26: 0x11160ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 27: 0x1117112e0 - >::call_once + 28: 0x1115abf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 29: 0x111788630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 30: 0x11036a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 31: 0x11037d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 32: 0x11160ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 33: 0x111711048 - >::call_once + 34: 0x11156cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 35: 0x111775ecc - 
rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 36: 0x11036534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 37: 0x11041513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 38: 0x1108bb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 39: 0x11160e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 40: 0x1116b2cf0 - >::call_once + 41: 0x11152ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 42: 0x1117636ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 43: 0x10ff66ee0 - ::enter::> + 44: 0x10ff34448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 45: 0x10ff81978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 46: 0x10ff7e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 47: 0x10ff7edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 48: 0x1120c3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 49: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack diff --git a/rustc-ice-2024-11-04T12_37_01-74253.txt b/rustc-ice-2024-11-04T12_37_01-74253.txt new file mode 100644 index 000000000..6bcecf0f7 --- /dev/null +++ b/rustc-ice-2024-11-04T12_37_01-74253.txt @@ -0,0 +1,62 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x110a2ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10e8ab468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x110a49608 - 
std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x110a49260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x110a46e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x110a48f24 - _rust_begin_unwind + 6: 0x113137ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x11326dc1c - >::const_param_out_of_range + 8: 0x10f775ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x10f74651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x10f73a120 - >::super_fold_with::> + 11: 0x10f689f18 - >::super_fold_with::> + 12: 0x10f687ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x10f6873b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 14: 0x10f78372c - >::try_fold_with::> + 15: 0x10f755a1c - ::instantiate_into + 16: 0x110659848 - ::nominal_obligations + 17: 0x110658710 - >::visit_const + 18: 0x110657b58 - >::visit_ty + 19: 0x110655db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 20: 0x1107cb15c - ::process_obligation + 21: 0x1107ac724 - >::process_obligations:: + 22: 0x1107c83c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 23: 0x1105f6608 - >::assumed_wf_types_and_report_errors + 24: 0x10ed06c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 25: 0x10ff9ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 26: 0x1100a12e0 - >::call_once + 27: 0x10ff3bf1c - 
rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 28: 0x110118630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 29: 0x10ecfa5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 30: 0x10ed0d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 31: 0x10ff9ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 32: 0x1100a1048 - >::call_once + 33: 0x10fefcf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 34: 0x110105ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 35: 0x10ecf534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 36: 0x10eda513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 37: 0x10f24b918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 38: 0x10ff9e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 39: 0x110042cf0 - >::call_once + 40: 0x10febae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 41: 0x1100f36ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 42: 0x10e8f6ee0 - ::enter::> + 43: 0x10e8c4448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 44: 0x10e911978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, 
rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 45: 0x10e90e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 46: 0x10e90edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 47: 0x110a53a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 48: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack diff --git a/rustc-ice-2024-11-04T12_37_13-74307.txt b/rustc-ice-2024-11-04T12_37_13-74307.txt new file mode 100644 
index 000000000..6eb26635b --- /dev/null +++ b/rustc-ice-2024-11-04T12_37_13-74307.txt @@ -0,0 +1,62 @@ +thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: +const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] +stack backtrace: + 0: 0x10e1cec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea + 1: 0x10c04b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call + 2: 0x10e1e9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a + 3: 0x10e1e9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 + 4: 0x10e1e6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 + 5: 0x10e1e8f24 - _rust_begin_unwind + 6: 0x1108d7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 + 7: 0x110a0dc1c - >::const_param_out_of_range + 8: 0x10cf15ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const + 9: 0x10cee651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> + 10: 0x10ceda120 - >::super_fold_with::> + 11: 0x10ce29f18 - >::super_fold_with::> + 12: 0x10ce27ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 13: 0x10ce273b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> + 
14: 0x10cf2372c - >::try_fold_with::> + 15: 0x10cef5a1c - ::instantiate_into + 16: 0x10ddf9848 - ::nominal_obligations + 17: 0x10ddf8710 - >::visit_const + 18: 0x10ddf7b58 - >::visit_ty + 19: 0x10ddf5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations + 20: 0x10df6b15c - ::process_obligation + 21: 0x10df4c724 - >::process_obligations:: + 22: 0x10df683c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible + 23: 0x10dd96608 - >::assumed_wf_types_and_report_errors + 24: 0x10c4a6c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed + 25: 0x10d73ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 26: 0x10d8412e0 - >::call_once + 27: 0x10d6dbf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 28: 0x10d8b8630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace + 29: 0x10c49a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> + 30: 0x10c4ad898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf + 31: 0x10d73ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 32: 0x10d841048 - >::call_once + 33: 0x10d69cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 34: 0x10d8a5ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace + 35: 0x10c49534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> + 36: 0x10c54513c - 
rustc_hir_analysis[6576f1f28a8b13c4]::check_crate + 37: 0x10c9eb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis + 38: 0x10d73e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> + 39: 0x10d7e2cf0 - >::call_once + 40: 0x10d65ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> + 41: 0x10d8936ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace + 42: 0x10c096ee0 - ::enter::> + 43: 0x10c064448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 44: 0x10c0b1978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> + 45: 0x10c0ae0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> + 46: 0x10c0aedb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} + 47: 0x10e1f3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 + 48: 0x18b24ef94 - __pthread_joiner_wake + + +rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) +platform: aarch64-apple-darwin + +query stack during panic: +#0 [check_well_formed] checking that `mpt_sequential::` is well-formed +#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` +#2 [analysis] running analysis passes on this crate +end of query stack From 6a2d2d87cf2d9c85eeee88e17986db72d13b9f1e Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 8 Nov 2024 13:36:46 +0000 Subject: [PATCH 05/15] Receipt Leaf Circuit added with tests --- mp2-common/src/array.rs | 8 +- mp2-common/src/eth.rs | 217 ++++++++++------- mp2-common/src/group_hashing/mod.rs | 2 - .../src/mpt_sequential/leaf_or_extension.rs | 62 ++++- mp2-common/src/mpt_sequential/mod.rs | 42 +++- mp2-test/src/circuit.rs | 99 ++++++++ mp2-test/src/mpt_sequential.rs | 17 +- mp2-v1/src/lib.rs | 1 + mp2-v1/src/receipt_extraction/leaf.rs | 221 ++++++++---------- mp2-v1/src/receipt_extraction/mod.rs | 29 +++ .../src/receipt_extraction/public_inputs.rs | 120 ++++++++-- mp2-v1/src/values_extraction/api.rs | 2 +- 12 files changed, 564 insertions(+), 256 deletions(-) diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 624e5e1bf..38872794c 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -638,7 +638,7 @@ where let (low_bits, high_bits) = b.split_low_high(at, 6, 12); // Search each of the smaller arrays for the target at `low_bits` - let first_search = arrays + let mut first_search = arrays .into_iter() .map(|array| { b.random_access( @@ -652,6 +652,10 @@ where }) .collect::>(); + // Now we push a number of zero targets into the array to make it a power of 2 + let next_power_of_two = 
first_search.len().next_power_of_two(); + let zero_target = b.zero(); + first_search.resize(next_power_of_two, zero_target); // Serach the result for the Target at `high_bits` T::from_target(b.random_access(high_bits, first_search)) } @@ -683,7 +687,7 @@ where let i_target = b.constant(F::from_canonical_usize(i)); let i_plus_n_target = b.add(at, i_target); - // out_val = arr[((i+n)<=n+M) * (i+n)] + self.random_access_large_array(b, i_plus_n_target) }), } diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 8b0e9226b..04abeb373 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -467,23 +467,42 @@ impl ReceiptQuery { .into_iter() .map(|index| { let key = index.rlp_bytes(); + let index_size = key.len(); - let proof = block_util.receipts_trie.get_proof(&key)?; + + let proof = block_util.receipts_trie.get_proof(&key[..])?; + + // Since the compact encoding of the key is stored first plus an additional list header and + // then the first element in the receipt body is the transaction type we calculate the offset to that point + + let last_node = proof.last().ok_or(eth_trie::TrieError::DB( + "Could not get last node in proof".to_string(), + ))?; + + let list_length_hint = last_node[0] as usize - 247; + let key_length = if last_node[1 + list_length_hint] > 128 { + last_node[1 + list_length_hint] as usize - 128 + } else { + 0 + }; + let body_length_hint = last_node[2 + list_length_hint + key_length] as usize - 183; + let body_offset = 4 + list_length_hint + key_length + body_length_hint; + let receipt = block_util.txs[index as usize].receipt(); - let rlp_body = receipt.encoded_2718(); - // Skip the first byte as it refers to the transaction type - let length_hint = rlp_body[1] as usize - 247; - let status_offset = 2 + length_hint; - let gas_hint = rlp_body[3 + length_hint] as usize - 128; + let body_length_hint = last_node[body_offset] as usize - 247; + let length_hint = body_offset + body_length_hint; + + let status_offset = 1 + length_hint; + 
let gas_hint = last_node[2 + length_hint] as usize - 128; // Logs bloom is always 256 bytes long and comes after the gas used the first byte is 185 then 1 then 0 then the bloom so the // log data starts at 4 + length_hint + gas_hint + 259 - let log_offset = 4 + length_hint + gas_hint + 259; + let log_offset = 3 + length_hint + gas_hint + 259; - let log_hint = if rlp_body[log_offset] < 247 { - rlp_body[log_offset] as usize - 192 + let log_hint = if last_node[log_offset] < 247 { + last_node[log_offset] as usize - 192 } else { - rlp_body[log_offset] as usize - 247 + last_node[log_offset] as usize - 247 }; // We iterate through the logs and store the offsets we care about. let mut current_log_offset = log_offset + 1 + log_hint; @@ -604,11 +623,7 @@ impl BlockUtil { let body_rlp = receipt_primitive.encoded_2718(); let tx_body_rlp = transaction_primitive.encoded_2718(); - println!( - "TX index {} RLP encoded: {:?}", - receipt.transaction_index.unwrap(), - tx_index.to_vec() - ); + receipts_trie .insert(&tx_index, &body_rlp) .expect("can't insert receipt"); @@ -618,6 +633,8 @@ impl BlockUtil { TxWithReceipt(transaction.clone(), receipt_primitive) }) .collect::>(); + receipts_trie.root_hash()?; + transactions_trie.root_hash()?; Ok(BlockUtil { block, txs: consensus_receipts, @@ -672,11 +689,10 @@ mod tryethers { use ethers::{ providers::{Http, Middleware, Provider}, types::{ - Address, Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, - H256, U64, + Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, H256, U64, }, }; - use rlp::{Encodable, Rlp, RlpStream}; + use rlp::{Encodable, RlpStream}; /// A wrapper around a transaction and its receipt. The receipt is used to filter /// bad transactions, so we only compute over valid transactions. 
@@ -823,8 +839,8 @@ mod test { use alloy::{ node_bindings::Anvil, - primitives::{Bytes, Log}, - providers::ProviderBuilder, + primitives::{Bytes, Log, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, rlp::Decodable, sol, }; @@ -835,10 +851,10 @@ mod test { types::BlockNumber, }; use hashbrown::HashMap; + use tokio::task::JoinSet; use crate::{ mpt_sequential::utils::nibbles_to_bytes, - types::MAX_BLOCK_LEN, utils::{Endianness, Packer}, }; use mp2_test::eth::{get_mainnet_url, get_sepolia_url}; @@ -974,14 +990,11 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { - // Spin up a local node. - let anvil = Anvil::new().spawn(); - // Create a provider with the wallet for contract deployment and interaction. - let rpc_url = anvil.endpoint(); - - let rpc = ProviderBuilder::new().on_http(rpc_url.parse().unwrap()); + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_wallet_and_config(|anvil| Anvil::block_time(anvil, 1)); - // Make a contract taht emits events so we can pick up on them + // Make a contract that emits events so we can pick up on them sol! 
{ #[allow(missing_docs)] // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin @@ -1008,84 +1021,108 @@ mod test { } } // Deploy the contract using anvil - let contract = EventEmitter::deploy(&rpc).await?; + let contract = EventEmitter::deploy(rpc.clone()).await?; // Fire off a few transactions to emit some events - let mut transactions = Vec::::new(); - - for i in 0..10 { - if i % 2 == 0 { - let builder = contract.testEmit(); - let tx_hash = builder.send().await?.watch().await?; - let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); - transactions.push(transaction); - } else { - let builder = contract.twoEmits(); - let tx_hash = builder.send().await?.watch().await?; - let transaction = rpc.get_transaction_by_hash(tx_hash).await?.unwrap(); - transactions.push(transaction); - } + + let address = rpc.default_signer_address(); + rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let tx_reqs = (0..10) + .map(|i| match i % 2 { + 0 => contract + .testEmit() + .into_transaction_request() + .nonce(i as u64), + 1 => contract + .twoEmits() + .into_transaction_request() + .nonce(i as u64), + _ => unreachable!(), + }) + .collect::>(); + let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { + let rpc_clone = rpc.clone(); + join_set.spawn(async move { + rpc_clone + .send_transaction(tx_req) + .await + .unwrap() + .watch() + .await + .unwrap() + }); + }); + + let hashes = join_set.join_all().await; + let mut transactions = Vec::new(); + for hash in hashes.into_iter() { + transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } + let block_number = transactions.first().unwrap().block_number.unwrap(); + // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); let events = all_events.get("testEvent").unwrap(); let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); - // Now for each transaction we fetch the block, then get 
the MPT Trie proof that the receipt is included and verify it - for transaction in transactions.iter() { - let index = transaction - .block_number - .ok_or(anyhow!("Could not get block number from transaction"))?; - let block = rpc - .get_block( - BlockNumberOrTag::Number(index).into(), - alloy::rpc::types::BlockTransactionsKind::Full, - ) - .await? - .ok_or(anyhow!("Could not get block test"))?; - let proofs = receipt_query - .query_receipt_proofs(&rpc, BlockNumberOrTag::Number(index)) - .await?; - - for proof in proofs.into_iter() { - let memdb = Arc::new(MemoryDB::new(true)); - let tx_trie = EthTrie::new(Arc::clone(&memdb)); + let block = rpc + .get_block( + BlockNumberOrTag::Number(block_number).into(), + alloy::rpc::types::BlockTransactionsKind::Full, + ) + .await? + .ok_or(anyhow!("Could not get block test"))?; + let receipt_hash = block.header().receipts_root; + let proofs = receipt_query + .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .await?; - let mpt_key = transaction.transaction_index.unwrap().rlp_bytes(); - let receipt_hash = block.header().receipts_root; - let is_valid = tx_trie - .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? - .ok_or(anyhow!("No proof found when verifying"))?; + // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it - let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + for proof in proofs.iter() { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = proof.tx_index.rlp_bytes(); + + let _ = tx_trie + .verify_proof(receipt_hash.0.into(), &mpt_key, proof.mpt_proof.clone())? 
+ .ok_or(anyhow!("No proof found when verifying"))?; + + let last_node = proof + .mpt_proof + .last() + .ok_or(anyhow!("Couldn't get first node in proof"))?; + let expected_sig: [u8; 32] = keccak256(receipt_query.event.signature().as_bytes()) + .try_into() + .unwrap(); + + for log_offset in proof.relevant_logs_offset.iter() { + let mut buf = &last_node[*log_offset..*log_offset + proof.event_log_info.size]; + let decoded_log = Log::decode(&mut buf)?; + let raw_bytes: [u8; 20] = last_node[*log_offset + + proof.event_log_info.add_rel_offset + ..*log_offset + proof.event_log_info.add_rel_offset + 20] + .to_vec() .try_into() .unwrap(); - - for log_offset in proof.relevant_logs_offset.iter() { - let mut buf = &is_valid[*log_offset..*log_offset + proof.event_log_info.size]; - let decoded_log = Log::decode(&mut buf)?; - let raw_bytes: [u8; 20] = is_valid[*log_offset - + proof.event_log_info.add_rel_offset - ..*log_offset + proof.event_log_info.add_rel_offset + 20] - .to_vec() - .try_into() - .unwrap(); - assert_eq!(decoded_log.address, receipt_query.contract); - assert_eq!(raw_bytes, receipt_query.contract); - let topics = decoded_log.topics(); - assert_eq!(topics[0].0, expected_sig); - let raw_bytes: [u8; 32] = is_valid[*log_offset - + proof.event_log_info.sig_rel_offset - ..*log_offset + proof.event_log_info.sig_rel_offset + 32] - .to_vec() - .try_into() - .unwrap(); - assert_eq!(topics[0].0, raw_bytes); - } + assert_eq!(decoded_log.address, receipt_query.contract); + assert_eq!(raw_bytes, receipt_query.contract); + let topics = decoded_log.topics(); + assert_eq!(topics[0].0, expected_sig); + let raw_bytes: [u8; 32] = last_node[*log_offset + + proof.event_log_info.sig_rel_offset + ..*log_offset + proof.event_log_info.sig_rel_offset + 32] + .to_vec() + .try_into() + .unwrap(); + assert_eq!(topics[0].0, raw_bytes); } } + Ok(()) } diff --git a/mp2-common/src/group_hashing/mod.rs b/mp2-common/src/group_hashing/mod.rs index 05c0d34ca..819eb7c2b 100644 --- 
a/mp2-common/src/group_hashing/mod.rs +++ b/mp2-common/src/group_hashing/mod.rs @@ -21,8 +21,6 @@ use plonky2_ecgfp5::{ }, }; -use std::array::from_fn as create_array; - mod curve_add; pub mod field_to_curve; mod sswu_gadget; diff --git a/mp2-common/src/mpt_sequential/leaf_or_extension.rs b/mp2-common/src/mpt_sequential/leaf_or_extension.rs index 8c64d7584..e5c0cf482 100644 --- a/mp2-common/src/mpt_sequential/leaf_or_extension.rs +++ b/mp2-common/src/mpt_sequential/leaf_or_extension.rs @@ -1,6 +1,8 @@ //! MPT leaf or extension node gadget -use super::{advance_key_leaf_or_extension, key::MPTKeyWireGeneric, PAD_LEN}; +use super::{ + advance_key_leaf_or_extension, advance_key_receipt_leaf, key::MPTKeyWireGeneric, PAD_LEN, +}; use crate::{ array::{Array, Vector, VectorWire}, keccak::{InputData, KeccakCircuit, KeccakWires}, @@ -96,3 +98,61 @@ impl MPTLeafOrExtensionNodeGeneric { } } } + +/// Wrapped wires for a MPT receipt leaf +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct MPTReceiptLeafWiresGeneric +where + [(); PAD_LEN(NODE_LEN)]:, +{ + /// MPT node + pub node: VectorWire, + /// MPT root + pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// New MPT key after advancing the current key + pub key: MPTKeyWireGeneric, +} + +/// Receipt leaf node as we have to do things differently for efficiency reasons. +pub struct MPTReceiptLeafNode; + +impl MPTReceiptLeafNode { + /// Build the MPT node and advance the current key. + pub fn build_and_advance_key< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + >( + b: &mut CircuitBuilder, + current_key: &MPTKeyWireGeneric, + ) -> MPTReceiptLeafWiresGeneric + where + [(); PAD_LEN(NODE_LEN)]:, + { + let zero = b.zero(); + let tru = b._true(); + + // Build the node and ensure it only includes bytes. + let node = VectorWire::::new(b); + + node.assert_bytes(b); + + // Expose the keccak root of this subtree starting at this node. 
+ let root = KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::hash_vector(b, &node); + + // We know that the rlp encoding of the compact encoding of the key is going to be in roughly the first 10 bytes of + // the node since the node is list byte, 2 bytes for list length (maybe 3), key length byte (1), key compact encoding (4 max) + // so we take 10 bytes to be safe since this won't affect the number of random access gates we use. + let rlp_headers = decode_fixed_list::<_, D, 1>(b, &node.arr.arr[..10], zero); + + let (key, valid) = advance_key_receipt_leaf::( + b, + &node, + current_key, + &rlp_headers, + ); + b.connect(tru.target, valid.target); + + MPTReceiptLeafWiresGeneric { node, root, key } + } +} diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index 3c6dd8be4..e4518401a 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -38,7 +38,7 @@ pub use key::{ }; pub use leaf_or_extension::{ MPTLeafOrExtensionNode, MPTLeafOrExtensionNodeGeneric, MPTLeafOrExtensionWires, - MPTLeafOrExtensionWiresGeneric, + MPTLeafOrExtensionWiresGeneric, MPTReceiptLeafNode, MPTReceiptLeafWiresGeneric, }; /// Number of items in the RLP encoded list in a leaf node. @@ -52,7 +52,7 @@ pub const MAX_LEAF_VALUE_LEN: usize = 33; /// This is the maximum size we allow for the value of Receipt Trie leaf /// currently set to be the same as we allow for a branch node in the Storage Trie /// minus the length of the key header and key -pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 526; +pub const MAX_RECEIPT_LEAF_VALUE_LEN: usize = 503; /// RLP item size for the extension node pub const MPT_EXTENSION_RLP_SIZE: usize = 2; @@ -443,6 +443,44 @@ pub fn advance_key_leaf_or_extension< let condition = b.and(condition, should_true); (new_key, leaf_child_hash, condition) } + +/// Returns the key with the pointer moved in the case of a Receipt Trie leaf. 
+pub fn advance_key_receipt_leaf< + F: RichField + Extendable, + const D: usize, + const NODE_LEN: usize, + const KEY_LEN: usize, +>( + b: &mut CircuitBuilder, + node: &VectorWire, + key: &MPTKeyWireGeneric, + rlp_headers: &RlpList<1>, +) -> (MPTKeyWireGeneric, BoolTarget) { + let key_header = RlpHeader { + data_type: rlp_headers.data_type[0], + offset: rlp_headers.offset[0], + len: rlp_headers.len[0], + }; + + // To save on operations we know the key is going to be in the first 10 items so we + // only feed these into `decode_compact_encoding` + let sub_array: Array = Array { + arr: create_array(|i| node.arr.arr[i]), + }; + let (extracted_key, should_true) = + decode_compact_encoding::<_, _, _, KEY_LEN>(b, &sub_array, &key_header); + + // note we are going _backwards_ on the key, so we need to subtract the expected key length + // we want to check against + let new_key = key.advance_by(b, extracted_key.real_len); + // NOTE: there is no need to check if the extracted_key is indeed a subvector of the full key + // in this case. Indeed, in leaf/ext. there is only one key possible. Since we decoded it + // from the beginning of the node, and that the hash of the node also starts at the beginning, + // either the attacker gives the right node or it gives an invalid node and hashes will not + // match. 
+ + (new_key, should_true) +} #[cfg(test)] mod test { use std::array::from_fn as create_array; diff --git a/mp2-test/src/circuit.rs b/mp2-test/src/circuit.rs index 262d4384e..f810dac93 100644 --- a/mp2-test/src/circuit.rs +++ b/mp2-test/src/circuit.rs @@ -105,6 +105,7 @@ pub fn prove_circuit< let now = std::time::Instant::now(); u.prove(&mut pw, &setup.0); let proof = setup.1.prove(pw).expect("invalid proof"); + println!("[+] Proof generated in {:?}ms", now.elapsed().as_millis()); setup .2 @@ -124,6 +125,7 @@ pub fn run_circuit< u: U, ) -> ProofWithPublicInputs { let setup = setup_circuit::(); + println!( "setup.verifierdata hash {:?}", setup.2.verifier_only.circuit_digest @@ -131,3 +133,100 @@ pub fn run_circuit< prove_circuit(&setup, &u) } + +/// Given a `PartitionWitness` that has only inputs set, populates the rest of the witness using the +/// given set of generators. +pub fn debug_generate_partial_witness< + 'a, + F: RichField + Extendable, + C: GenericConfig, + const D: usize, +>( + inputs: PartialWitness, + prover_data: &'a plonky2::plonk::circuit_data::ProverOnlyCircuitData, + common_data: &'a plonky2::plonk::circuit_data::CommonCircuitData, +) -> plonky2::iop::witness::PartitionWitness<'a, F> { + use plonky2::iop::witness::WitnessWrite; + + let config = &common_data.config; + let generators = &prover_data.generators; + let generator_indices_by_watches = &prover_data.generator_indices_by_watches; + + let mut witness = plonky2::iop::witness::PartitionWitness::new( + config.num_wires, + common_data.degree(), + &prover_data.representative_map, + ); + + for (t, v) in inputs.target_values.into_iter() { + witness.set_target(t, v); + } + + // Build a list of "pending" generators which are queued to be run. Initially, all generators + // are queued. + let mut pending_generator_indices: Vec<_> = (0..generators.len()).collect(); + + // We also track a list of "expired" generators which have already returned false. 
+ let mut generator_is_expired = vec![false; generators.len()]; + let mut remaining_generators = generators.len(); + + let mut buffer = plonky2::iop::generator::GeneratedValues::empty(); + + // Keep running generators until we fail to make progress. + while !pending_generator_indices.is_empty() { + let mut next_pending_generator_indices = Vec::new(); + + for &generator_idx in &pending_generator_indices { + if generator_is_expired[generator_idx] { + continue; + } + + let finished = generators[generator_idx].0.run(&witness, &mut buffer); + if finished { + generator_is_expired[generator_idx] = true; + remaining_generators -= 1; + } + + // Merge any generated values into our witness, and get a list of newly-populated + // targets' representatives. + let new_target_reps = buffer + .target_values + .drain(..) + .flat_map(|(t, v)| witness.set_target_returning_rep(t, v)); + + // Enqueue unfinished generators that were watching one of the newly populated targets. + for watch in new_target_reps { + let opt_watchers = generator_indices_by_watches.get(&watch); + if let Some(watchers) = opt_watchers { + for &watching_generator_idx in watchers { + if !generator_is_expired[watching_generator_idx] { + next_pending_generator_indices.push(watching_generator_idx); + } + } + } + } + } + + pending_generator_indices = next_pending_generator_indices; + } + if remaining_generators != 0 { + println!("{} generators weren't run", remaining_generators); + + let filtered = generator_is_expired + .iter() + .enumerate() + .filter_map(|(index, flag)| if !flag { Some(index) } else { None }) + .min(); + + if let Some(min_val) = filtered { + println!("generator at index: {} is the first to not run", min_val); + println!("This has ID: {}", generators[min_val].0.id()); + + for watch in generators[min_val].0.watch_list().iter() { + println!("watching: {:?}", watch); + } + } + } + + witness +} diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index d1e79caa1..570170235 100644 
--- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -2,8 +2,7 @@ use alloy::{ eips::BlockNumberOrTag, node_bindings::Anvil, primitives::U256, - providers::{ext::AnvilApi, Provider, ProviderBuilder, RootProvider, WalletProvider}, - rpc::types::Transaction, + providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, sol, }; use eth_trie::{EthTrie, MemoryDB, Trie}; @@ -53,7 +52,7 @@ pub fn generate_random_storage_mpt( /// This function is used so that we can generate a Receipt Trie for a blog with varying transactions /// (i.e. some we are interested in and some we are not). -fn generate_receipt_proofs() -> Vec { +pub fn generate_receipt_proofs() -> Vec { // Make a contract that emits events so we can pick up on them sol! { #[allow(missing_docs)] @@ -179,15 +178,3 @@ fn generate_receipt_proofs() -> Vec { .unwrap() }) } - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn tester() { - let receipt_proofs = generate_receipt_proofs(); - for proof in receipt_proofs.iter() { - println!("proof: {}", proof.tx_index); - } - } -} diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 2c3b0bc95..1db586f80 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -17,6 +17,7 @@ pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532); pub const MAX_EXTENSION_NODE_LEN: usize = 69; pub const MAX_EXTENSION_NODE_LEN_PADDED: usize = PAD_LEN(69); pub const MAX_LEAF_NODE_LEN: usize = MAX_EXTENSION_NODE_LEN; +pub const MAX_RECEIPT_LEAF_NODE_LEN: usize = 512; pub mod api; pub mod block_extraction; diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs index f7c99d8a7..8fca8a1c5 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -1,17 +1,15 @@ //! 
Module handling the leaf node inside a Receipt Trie -use super::public_inputs::PublicInputArgs; +use crate::MAX_RECEIPT_LEAF_NODE_LEN; + +use super::public_inputs::{PublicInputArgs, PublicInputs}; use mp2_common::{ array::{Array, Vector, VectorWire}, eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, - mpt_sequential::{ - MPTLeafOrExtensionNodeGeneric, ReceiptKeyWire, MAX_RECEIPT_LEAF_VALUE_LEN, - MAX_TX_KEY_NIBBLE_LEN, PAD_LEN, - }, - poseidon::H, + mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, public_inputs::PublicInputCommon, types::{CBuilder, GFp}, utils::{Endianness, PackerTarget}, @@ -23,13 +21,15 @@ use plonky2::{ target::Target, witness::{PartialWitness, WitnessWrite}, }, + plonk::circuit_builder::CircuitBuilder, }; use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; +use recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; - +use std::array::from_fn; /// Maximum number of logs per transaction we can process const MAX_LOGS_PER_TX: usize = 2; @@ -42,10 +42,10 @@ where pub event: EventWires, /// The node bytes pub node: VectorWire, - /// The actual value stored in the node - pub value: Array, /// the hash of the node bytes pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>, + /// The index of this receipt in the block + pub index: Target, /// The offset of the status of the transaction in the RLP encoded receipt node. 
pub status_offset: Target, /// The offsets of the relevant logs inside the node @@ -102,7 +102,7 @@ impl LogColumn { impl EventWires { /// Convert to an array for metadata digest - pub fn to_slice(&self) -> [Target; 70] { + pub fn to_vec(&self) -> Vec { let topics_flat = self .topics .iter() @@ -113,60 +113,45 @@ impl EventWires { .iter() .flat_map(|t| t.to_array()) .collect::>(); - let mut out = [Target::default(); 70]; - out[0] = self.size; - out.iter_mut() - .skip(1) - .take(20) - .enumerate() - .for_each(|(i, entry)| *entry = self.address.arr[i]); - out[21] = self.add_rel_offset; - out.iter_mut() - .skip(22) - .take(32) - .enumerate() - .for_each(|(i, entry)| *entry = self.event_signature.arr[i]); - out[54] = self.sig_rel_offset; - out.iter_mut() - .skip(55) - .take(9) - .enumerate() - .for_each(|(i, entry)| *entry = topics_flat[i]); - out.iter_mut() - .skip(64) - .take(6) - .enumerate() - .for_each(|(i, entry)| *entry = data_flat[i]); + let mut out = Vec::new(); + out.push(self.size); + out.extend_from_slice(&self.address.arr); + out.push(self.add_rel_offset); + out.extend_from_slice(&self.event_signature.arr); + out.push(self.sig_rel_offset); + out.extend_from_slice(&topics_flat); + out.extend_from_slice(&data_flat); + out } - pub fn verify_logs_and_extract_values( + pub fn verify_logs_and_extract_values( &self, b: &mut CBuilder, - value: &Array, + value: &VectorWire, status_offset: Target, relevant_logs_offsets: &VectorWire, ) -> CurveTarget { let t = b._true(); let zero = b.zero(); let curve_zero = b.curve_zero(); - let mut value_digest = b.curve_zero(); + let mut points = Vec::new(); // Enforce status is true. 
- let status = value.random_access_large_array(b, status_offset); + let status = value.arr.random_access_large_array(b, status_offset); b.connect(status, t.target); for log_offset in relevant_logs_offsets.arr.arr { // Extract the address bytes let address_start = b.add(log_offset, self.add_rel_offset); - let address_bytes = value.extract_array_large::<_, _, 20>(b, address_start); + let address_bytes = value.arr.extract_array_large::<_, _, 20>(b, address_start); let address_check = address_bytes.equals(b, &self.address); // Extract the signature bytes let sig_start = b.add(log_offset, self.sig_rel_offset); - let sig_bytes = value.extract_array_large::<_, _, 32>(b, sig_start); + let sig_bytes = value.arr.extract_array_large::<_, _, 32>(b, sig_start); let sig_check = sig_bytes.equals(b, &self.event_signature); @@ -182,7 +167,7 @@ impl EventWires { for &log_column in self.topics.iter().chain(self.data.iter()) { let data_start = b.add(log_offset, log_column.rel_byte_offset); // The data is always 32 bytes long - let data_bytes = value.extract_array_large::<_, _, 32>(b, data_start); + let data_bytes = value.arr.extract_array_large::<_, _, 32>(b, data_start); // Pack the data and get the digest let packed_data = data_bytes.arr.pack(b, Endianness::Big); @@ -197,11 +182,11 @@ impl EventWires { let selector = b.and(dummy_column, dummy); let selected_point = b.select_curve_point(selector, curve_zero, data_digest); - value_digest = b.add_curve_point(&[selected_point, value_digest]); + points.push(selected_point); } } - value_digest + b.add_curve_point(&points) } } @@ -215,7 +200,7 @@ impl ReceiptLeafCircuit where [(); PAD_LEN(NODE_LEN)]:, { - pub fn build_leaf_wires(b: &mut CBuilder) -> ReceiptLeafWires { + pub fn build(b: &mut CBuilder) -> ReceiptLeafWires { // Build the event wires let event_wires = Self::build_event_wires(b); @@ -227,27 +212,24 @@ where let mpt_key = ReceiptKeyWire::new(b); // Build the node wires. 
- let wires = MPTLeafOrExtensionNodeGeneric::build_and_advance_key::< - _, - D, - NODE_LEN, - MAX_RECEIPT_LEAF_VALUE_LEN, - >(b, &mpt_key); + let wires = MPTReceiptLeafNode::build_and_advance_key::<_, D, NODE_LEN>(b, &mpt_key); + let node = wires.node; let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let receipt_body = wires.value; - let mut dv = event_wires.verify_logs_and_extract_values( + let mut dv = event_wires.verify_logs_and_extract_values::( b, - &receipt_body, + &node, status_offset, &relevant_logs_offset, ); + let value_id = b.map_to_curve_point(&[index]); + dv = b.add_curve_point(&[value_id, dv]); - let dm = b.hash_n_to_hash_no_pad::(event_wires.to_slice().to_vec()); + let dm = b.map_to_curve_point(&event_wires.to_vec()); // Register the public inputs PublicInputArgs { @@ -261,8 +243,8 @@ where ReceiptLeafWires { event: event_wires, node, - value: receipt_body, root, + index, status_offset, relevant_logs_offset, mpt_key, @@ -273,24 +255,22 @@ where let size = b.add_virtual_target(); // Packed address - let arr = [b.add_virtual_target(); 20]; - let address = Array::from_array(arr); + let address = Array::::new(b); // relative offset of the address let add_rel_offset = b.add_virtual_target(); // Event signature - let arr = [b.add_virtual_target(); 32]; - let event_signature = Array::from_array(arr); + let event_signature = Array::::new(b); // Signature relative offset let sig_rel_offset = b.add_virtual_target(); // topics - let topics = [Self::build_log_column(b); 3]; + let topics: [LogColumn; 3] = from_fn(|_| Self::build_log_column(b)); // data - let data = [Self::build_log_column(b); 2]; + let data: [LogColumn; 2] = from_fn(|_| Self::build_log_column(b)); EventWires { size, @@ -331,7 +311,7 @@ where &wires.root, &InputData::Assigned(&pad_node), ); - + pw.set_target(wires.index, GFp::from_canonical_u64(self.info.tx_index)); pw.set_target( wires.status_offset, 
GFp::from_canonical_usize(self.info.status_offset), @@ -406,13 +386,47 @@ where } } +/// Num of children = 0 +impl CircuitLogicWires for ReceiptLeafWires { + type CircuitBuilderParams = (); + + type Inputs = ReceiptLeafCircuit; + + const NUM_PUBLIC_INPUTS: usize = PublicInputs::::TOTAL_LEN; + + fn circuit_logic( + builder: &mut CircuitBuilder, + _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget; 0], + _builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + ReceiptLeafCircuit::build(builder) + } + + fn assign_input( + &self, + inputs: Self::Inputs, + pw: &mut PartialWitness, + ) -> anyhow::Result<()> { + inputs.assign(pw, self); + Ok(()) + } +} + #[cfg(test)] mod tests { use super::*; + use crate::receipt_extraction::compute_receipt_leaf_metadata_digest; + use mp2_common::{ + utils::{keccak256, Packer}, + C, + }; + use mp2_test::{ + circuit::{run_circuit, UserCircuit}, + mpt_sequential::generate_receipt_proofs, + }; #[derive(Clone, Debug)] struct TestReceiptLeafCircuit { c: ReceiptLeafCircuit, - exp_value: Vec, } impl UserCircuit for TestReceiptLeafCircuit @@ -420,91 +434,38 @@ mod tests { [(); PAD_LEN(NODE_LEN)]:, { // Leaf wires + expected extracted value - type Wires = ( - ReceiptLeafWires, - Array, - ); + type Wires = ReceiptLeafWires; fn build(b: &mut CircuitBuilder) -> Self::Wires { - let exp_value = Array::::new(b); - - let leaf_wires = ReceiptLeafCircuit::::build(b); - leaf_wires.value.enforce_equal(b, &exp_value); - - (leaf_wires, exp_value) + ReceiptLeafCircuit::::build(b) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.c.assign(pw, &wires.0); - wires - .1 - .assign_bytes(pw, &self.exp_value.clone().try_into().unwrap()); + self.c.assign(pw, &wires); } } #[test] fn test_leaf_circuit() { - const NODE_LEN: usize = 80; - - let simple_slot = 2_u8; - let slot = StorageSlot::Simple(simple_slot as usize); - let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); - let chain_id = 10; - let id 
= identifier_single_var_column(simple_slot, &contract_address, chain_id, vec![]); - - let (mut trie, _) = generate_random_storage_mpt::<3, MAPPING_LEAF_VALUE_LEN>(); - let value = random_vector(MAPPING_LEAF_VALUE_LEN); - let encoded_value: Vec = rlp::encode(&value).to_vec(); - // assert we added one byte of RLP header - assert_eq!(encoded_value.len(), MAPPING_LEAF_VALUE_LEN + 1); - println!("encoded value {:?}", encoded_value); - trie.insert(&slot.mpt_key(), &encoded_value).unwrap(); - trie.root_hash().unwrap(); - - let proof = trie.get_proof(&slot.mpt_key_vec()).unwrap(); - let node = proof.last().unwrap().clone(); - - let c = LeafSingleCircuit:: { - node: node.clone(), - slot: SimpleSlot::new(simple_slot), - id, - }; - let test_circuit = TestLeafSingleCircuit { - c, - exp_value: value.clone(), - }; + const NODE_LEN: usize = 512; + + let receipt_proof_infos = generate_receipt_proofs(); + let info = receipt_proof_infos.first().unwrap().clone(); + let c = ReceiptLeafCircuit:: { info: info.clone() }; + let test_circuit = TestReceiptLeafCircuit { c }; let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); - + let node = info.mpt_proof.last().unwrap().clone(); + // Check the output hash { let exp_hash = keccak256(&node).pack(Endianness::Little); assert_eq!(pi.root_hash(), exp_hash); } - { - let (key, ptr) = pi.mpt_key_info(); - - let exp_key = slot.mpt_key_vec(); - let exp_key: Vec<_> = bytes_to_nibbles(&exp_key) - .into_iter() - .map(F::from_canonical_u8) - .collect(); - assert_eq!(key, exp_key); - - let leaf_key: Vec> = rlp::decode_list(&node); - let nib = Nibbles::from_compact(&leaf_key[0]); - let exp_ptr = F::from_canonical_usize(MAX_KEY_NIBBLE_LEN - 1 - nib.nibbles().len()); - assert_eq!(exp_ptr, ptr); - } - // Check values digest - { - let exp_digest = compute_leaf_single_values_digest(id, &value); - assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); - } + // Check metadata digest { - let exp_digest = 
compute_leaf_single_metadata_digest(id, simple_slot); + let exp_digest = compute_receipt_leaf_metadata_digest(&info.event_log_info); assert_eq!(pi.metadata_digest(), exp_digest.to_weierstrass()); } - assert_eq!(pi.n(), F::ONE); } -} \ No newline at end of file +} diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index 6c3803e08..4950aef20 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -1,2 +1,31 @@ pub mod leaf; pub mod public_inputs; + +use mp2_common::{ + digest::Digest, eth::EventLogInfo, group_hashing::map_to_curve_point, types::GFp, +}; +use plonky2::field::types::Field; + +/// Calculate `metadata_digest = D(key_id || value_id || slot)` for receipt leaf. +pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { + let topics_flat = event + .topics + .iter() + .chain(event.data.iter()) + .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) + .collect::>(); + + let mut out = Vec::new(); + out.push(event.size); + out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); + out.push(event.add_rel_offset); + out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); + out.push(event.sig_rel_offset); + out.extend_from_slice(&topics_flat); + + let data = out + .into_iter() + .map(GFp::from_canonical_usize) + .collect::>(); + map_to_curve_point(&data) +} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 901fc0b29..7a44ed175 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -1,14 +1,22 @@ //! 
Public inputs for Receipt Extraction circuits use mp2_common::{ + array::Array, keccak::{OutputHash, PACKED_HASH_LEN}, mpt_sequential::ReceiptKeyWire, public_inputs::{PublicInputCommon, PublicInputRange}, - types::{CBuilder, CURVE_TARGET_LEN}, + types::{CBuilder, GFp, GFp5, CURVE_TARGET_LEN}, + utils::{convert_point_to_curve_target, convert_slice_to_curve_point, FromTargets}, }; -use plonky2::hash::hash_types::{HashOutTarget, NUM_HASH_OUT_ELTS}; -use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; +use plonky2::{ + field::{extension::FieldExtension, types::Field}, + iop::target::Target, +}; +use plonky2_ecgfp5::{ + curve::curve::WeierstrassPoint, + gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, +}; /// The maximum length of a transaction index in a block in nibbles. /// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. @@ -23,7 +31,7 @@ const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; /// - `DV : Digest[F]` : value digest of all rows to extract const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; /// - `DM : Digest[F]` : metadata digest to extract -const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + NUM_HASH_OUT_ELTS; +const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + CURVE_TARGET_LEN; /// Public inputs for contract extraction #[derive(Clone, Debug)] @@ -35,7 +43,7 @@ pub struct PublicInputArgs<'a> { /// Digest of the values pub(crate) dv: CurveTarget, /// The poseidon hash of the metadata - pub(crate) dm: HashOutTarget, + pub(crate) dm: CurveTarget, } impl<'a> PublicInputCommon for PublicInputArgs<'a> { @@ -48,12 +56,7 @@ impl<'a> PublicInputCommon for PublicInputArgs<'a> { impl<'a> PublicInputArgs<'a> { /// Create a new public inputs. 
- pub fn new( - h: &'a OutputHash, - k: &'a ReceiptKeyWire, - dv: CurveTarget, - dm: HashOutTarget, - ) -> Self { + pub fn new(h: &'a OutputHash, k: &'a ReceiptKeyWire, dv: CurveTarget, dm: CurveTarget) -> Self { Self { h, k, dv, dm } } } @@ -63,14 +66,105 @@ impl<'a> PublicInputArgs<'a> { self.h.register_as_public_input(cb); self.k.register_as_input(cb); cb.register_curve_public_input(self.dv); - cb.register_public_inputs(&self.dm.elements); + cb.register_curve_public_input(self.dm); } pub fn digest_value(&self) -> CurveTarget { self.dv } - pub fn digest_metadata(&self) -> HashOutTarget { + pub fn digest_metadata(&self) -> CurveTarget { self.dm } } + +/// Public inputs wrapper of any proof generated in this module +#[derive(Clone, Debug)] +pub struct PublicInputs<'a, T> { + pub(crate) proof_inputs: &'a [T], +} + +impl PublicInputs<'_, Target> { + /// Get the merkle hash of the subtree this proof has processed. + pub fn root_hash_target(&self) -> OutputHash { + OutputHash::from_targets(self.root_hash_info()) + } + + /// Get the MPT key defined over the public inputs. + pub fn mpt_key(&self) -> ReceiptKeyWire { + let (key, ptr) = self.mpt_key_info(); + ReceiptKeyWire { + key: Array { + arr: std::array::from_fn(|i| key[i]), + }, + pointer: ptr, + } + } + + /// Get the values digest defined over the public inputs. + pub fn values_digest_target(&self) -> CurveTarget { + convert_point_to_curve_target(self.values_digest_info()) + } + + /// Get the metadata digest defined over the public inputs. + pub fn metadata_digest_target(&self) -> CurveTarget { + convert_point_to_curve_target(self.metadata_digest_info()) + } +} + +impl PublicInputs<'_, GFp> { + /// Get the merkle hash of the subtree this proof has processed. + pub fn root_hash(&self) -> Vec { + let hash = self.root_hash_info(); + hash.iter().map(|t| t.0 as u32).collect() + } + + /// Get the values digest defined over the public inputs. 
+ pub fn values_digest(&self) -> WeierstrassPoint { + let (x, y, is_inf) = self.values_digest_info(); + + WeierstrassPoint { + x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), + y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), + is_inf: is_inf.is_nonzero(), + } + } + + /// Get the metadata digest defined over the public inputs. + pub fn metadata_digest(&self) -> WeierstrassPoint { + let (x, y, is_inf) = self.metadata_digest_info(); + + WeierstrassPoint { + x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), + y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), + is_inf: is_inf.is_nonzero(), + } + } +} + +impl<'a, T: Copy> PublicInputs<'a, T> { + pub(crate) const TOTAL_LEN: usize = DM_RANGE.end; + + pub fn new(proof_inputs: &'a [T]) -> Self { + Self { proof_inputs } + } + + pub fn root_hash_info(&self) -> &[T] { + &self.proof_inputs[H_RANGE] + } + + pub fn mpt_key_info(&self) -> (&[T], T) { + let key = &self.proof_inputs[K_RANGE]; + let ptr = self.proof_inputs[T_RANGE.start]; + + (key, ptr) + } + + pub fn values_digest_info(&self) -> ([T; 5], [T; 5], T) { + convert_slice_to_curve_point(&self.proof_inputs[DV_RANGE]) + } + + pub fn metadata_digest_info(&self) -> ([T; 5], [T; 5], T) { + convert_slice_to_curve_point(&self.proof_inputs[DM_RANGE]) + } +} diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index a1bcaa6a8..2cf084122 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -185,7 +185,7 @@ macro_rules! impl_branch_circuits { } /// generates a proof from the inputs stored in `branch`. Depending on the size of the node, /// and the number of children proofs, it selects the right specialized circuit to generate the proof. 
- fn generate_proof( + pub fn generate_proof( &self, set: &RecursiveCircuits, branch_node: InputNode, From ba702eef584586150199d73b875839986fbdeaa3 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Fri, 8 Nov 2024 15:57:51 +0000 Subject: [PATCH 06/15] Change Receipt query test --- mp2-common/src/eth.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 04abeb373..db9b6da98 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -646,7 +646,7 @@ impl BlockUtil { // recompute the receipts trie by first converting all receipts form RPC type to consensus type // since in Alloy these are two different types and RLP functions are only implemented for // consensus ones. - fn check(&mut self) -> Result<()> { + pub fn check(&mut self) -> Result<()> { let computed = self.receipts_trie.root_hash()?; let tx_computed = self.transactions_trie.root_hash()?; let expected = self.block.header.receipts_root; @@ -839,8 +839,8 @@ mod test { use alloy::{ node_bindings::Anvil, - primitives::{Bytes, Log, U256}, - providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, + primitives::{Bytes, Log}, + providers::{Provider, ProviderBuilder, WalletProvider}, rlp::Decodable, sol, }; @@ -1026,17 +1026,18 @@ mod test { // Fire off a few transactions to emit some events let address = rpc.default_signer_address(); - rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); + let current_nonce = rpc.get_transaction_count(address).await?; + let tx_reqs = (0..10) .map(|i| match i % 2 { 0 => contract .testEmit() .into_transaction_request() - .nonce(i as u64), + .nonce(current_nonce + i as u64), 1 => contract .twoEmits() .into_transaction_request() - .nonce(i as u64), + .nonce(current_nonce + i as u64), _ => unreachable!(), }) .collect::>(); From bbf02b05a46d909abf770fa08a386519ddf17496 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 11 Nov 2024 11:55:02 +0000 Subject: [PATCH 07/15] Address 
review comments --- Cargo.toml | 1 + mp2-common/Cargo.toml | 1 - mp2-common/src/array.rs | 24 ++++++- mp2-common/src/eth.rs | 51 ++++++++------- mp2-common/src/mpt_sequential/key.rs | 2 +- mp2-test/src/circuit.rs | 2 +- mp2-test/src/mpt_sequential.rs | 43 ++++++------- mp2-v1/src/block_extraction/mod.rs | 4 -- mp2-v1/src/receipt_extraction/mod.rs | 4 +- .../src/receipt_extraction/public_inputs.rs | 8 +-- mp2-v1/src/values_extraction/api.rs | 2 +- rustc-ice-2024-11-04T12_36_50-74186.txt | 63 ------------------- rustc-ice-2024-11-04T12_37_01-74253.txt | 62 ------------------ rustc-ice-2024-11-04T12_37_13-74307.txt | 62 ------------------ 14 files changed, 80 insertions(+), 249 deletions(-) delete mode 100644 rustc-ice-2024-11-04T12_36_50-74186.txt delete mode 100644 rustc-ice-2024-11-04T12_37_01-74253.txt delete mode 100644 rustc-ice-2024-11-04T12_37_13-74307.txt diff --git a/Cargo.toml b/Cargo.toml index 885c85707..4dd4f5736 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,6 +31,7 @@ alloy = { version = "0.6", default-features = false, features = [ "transports", "postgres", ] } + anyhow = "1.0" base64 = "0.22" bb8 = "0.8.5" diff --git a/mp2-common/Cargo.toml b/mp2-common/Cargo.toml index 5f9d71079..1be109995 100644 --- a/mp2-common/Cargo.toml +++ b/mp2-common/Cargo.toml @@ -31,7 +31,6 @@ hex.workspace = true rand.workspace = true rstest.workspace = true tokio.workspace = true - mp2_test = { path = "../mp2-test" } [features] diff --git a/mp2-common/src/array.rs b/mp2-common/src/array.rs index 38872794c..523ebf1f0 100644 --- a/mp2-common/src/array.rs +++ b/mp2-common/src/array.rs @@ -606,6 +606,19 @@ where /// This function allows you to search a larger [`Array`] by representing it as a number of /// smaller [`Array`]s with size [`RANDOM_ACCESS_SIZE`], padding the final smaller array where required. 
+ /// For example if we have an array of length `512` and we wish to find the value at index `324` the following + /// occurs: + /// 1) Split the original [`Array`] into `512 / 64 = 8` chunks `[A_0, ... , A_7]` + /// 2) Express `324` in base 64 (Little Endian) `[4, 5]` + /// 3) For each `i \in [0, 7]` use a [`RandomAccessGate`] to lookup the `4`th element, `v_i,3` of `A_i` + /// and create a new list of length `8` that consists of `[v_0,3, v_1,3, ... v_7,3]` + /// 4) Now use another [`RandomAccessGate`] to select the `5`th element of this new list (`v_4,3` as we have zero-indexed both times) + /// + /// For comparison using [`Self::value_at`] on an [`Array`] with length `512` results in 129 rows, using this method + /// on the same [`Array`] results in 15 rows. + /// + /// As an aside, if the [`Array`] length is not divisible by `64` then we pad with zero values, since the size of the + /// [`Array`] is a compile time constant this will not affect circuit preprocessing. pub fn random_access_large_array, const D: usize>( &self, b: &mut CircuitBuilder, @@ -660,9 +673,12 @@ where T::from_target(b.random_access(high_bits, first_search)) } - /// Returns [`self[at..at+SUB_SIZE]`]. - /// This is more expensive than [`Self::extract_array`] due to using [`Self::random_access_large_array`] + /// Returns [`Self[at..at+SUB_SIZE]`]. + /// This is more expensive than [`Self::extract_array`] for [`Array`]s that are shorter than 64 elements long due to using [`Self::random_access_large_array`] /// instead of [`Self::value_at`]. This function enforces that the values extracted are within the array. + /// + /// For comparison using [`Self::extract_array`] on an [`Array`] of size `512` results in 5179 rows, using this method instead + /// results in 508 rows. 
pub fn extract_array_large< F: RichField + Extendable, const D: usize, @@ -687,7 +703,6 @@ where let i_target = b.constant(F::from_canonical_usize(i)); let i_plus_n_target = b.add(at, i_target); - self.random_access_large_array(b, i_plus_n_target) }), } @@ -927,6 +942,7 @@ mod test { let index = c.add_virtual_target(); let extracted = array.random_access_large_array(c, index); c.connect(exp_value, extracted); + (array, index, exp_value) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { @@ -937,6 +953,7 @@ mod test { pw.set_target(wires.2, F::from_canonical_u8(self.exp)); } } + let mut rng = thread_rng(); let mut arr = [0u8; SIZE]; rng.fill(&mut arr[..]); @@ -1030,6 +1047,7 @@ mod test { .assign(pw, &create_array(|i| F::from_canonical_u8(self.exp[i]))); } } + let mut rng = thread_rng(); let mut arr = [0u8; SIZE]; rng.fill(&mut arr[..]); diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index db9b6da98..3e8e3fa2d 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -688,9 +688,7 @@ mod tryethers { use eth_trie::{EthTrie, MemoryDB, Trie}; use ethers::{ providers::{Http, Middleware, Provider}, - types::{ - Block, BlockId, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, H256, U64, - }, + types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, }; use rlp::{Encodable, RlpStream}; @@ -838,12 +836,15 @@ mod test { use std::str::FromStr; use alloy::{ + network::TransactionBuilder, node_bindings::Anvil, - primitives::{Bytes, Log}, - providers::{Provider, ProviderBuilder, WalletProvider}, + primitives::{Bytes, Log, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder}, rlp::Decodable, sol, }; + use alloy_multicall::Multicall; + use eth_trie::Nibbles; use ethereum_types::U64; use ethers::{ @@ -991,8 +992,7 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { let rpc = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|anvil| Anvil::block_time(anvil, 
1)); + .on_anvil_with_config(|anvil| Anvil::fork(anvil, get_sepolia_url())); // Make a contract that emits events so we can pick up on them sol! { @@ -1023,30 +1023,37 @@ mod test { // Deploy the contract using anvil let contract = EventEmitter::deploy(rpc.clone()).await?; - // Fire off a few transactions to emit some events - - let address = rpc.default_signer_address(); - let current_nonce = rpc.get_transaction_count(address).await?; - + // (0..10).for_each(|j| { + // match i % 2 { + // 0 => multicall.add_call(), + // 1 => contract.twoEmits().into_transaction_request(), + // _ => unreachable!(), + // } + // }); let tx_reqs = (0..10) .map(|i| match i % 2 { - 0 => contract - .testEmit() - .into_transaction_request() - .nonce(current_nonce + i as u64), - 1 => contract - .twoEmits() - .into_transaction_request() - .nonce(current_nonce + i as u64), + 0 => contract.testEmit().into_transaction_request(), + 1 => contract.twoEmits().into_transaction_request(), _ => unreachable!(), }) .collect::>(); let mut join_set = JoinSet::new(); + tx_reqs.into_iter().for_each(|tx_req| { let rpc_clone = rpc.clone(); join_set.spawn(async move { rpc_clone - .send_transaction(tx_req) + .anvil_auto_impersonate_account(true) + .await + .unwrap(); + let sender_address = Address::random(); + let balance = U256::from(1e18 as u64); + rpc_clone + .anvil_set_balance(sender_address, balance) + .await + .unwrap(); + rpc_clone + .send_transaction(tx_req.with_from(sender_address)) .await .unwrap() .watch() @@ -1062,7 +1069,7 @@ mod test { } let block_number = transactions.first().unwrap().block_number.unwrap(); - + println!("block number: {block_number}"); // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); diff --git a/mp2-common/src/mpt_sequential/key.rs b/mp2-common/src/mpt_sequential/key.rs index f98b57aac..2a14780d7 100644 --- a/mp2-common/src/mpt_sequential/key.rs +++ b/mp2-common/src/mpt_sequential/key.rs @@ -19,7 +19,7 @@ pub type 
MPTKeyWire = MPTKeyWireGeneric; pub type ReceiptKeyWire = MPTKeyWireGeneric; -pub const MAX_TX_KEY_NIBBLE_LEN: usize = 6; +pub const MAX_TX_KEY_NIBBLE_LEN: usize = 4; /// Calculate the pointer from the MPT key. pub fn mpt_key_ptr(mpt_key: &[u8]) -> usize { diff --git a/mp2-test/src/circuit.rs b/mp2-test/src/circuit.rs index f810dac93..bed5a98c9 100644 --- a/mp2-test/src/circuit.rs +++ b/mp2-test/src/circuit.rs @@ -85,7 +85,7 @@ pub fn setup_circuit< }; println!("[+] Circuit data built in {:?}s", now.elapsed().as_secs()); - + println!("FRI config: {:?}", circuit_data.common.fri_params); (wires, circuit_data, vcd) } diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 570170235..3ab1346e1 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -1,8 +1,9 @@ use alloy::{ eips::BlockNumberOrTag, + network::TransactionBuilder, node_bindings::Anvil, - primitives::U256, - providers::{ext::AnvilApi, Provider, ProviderBuilder, WalletProvider}, + primitives::{Address, U256}, + providers::{ext::AnvilApi, Provider, ProviderBuilder}, sol, }; use eth_trie::{EthTrie, MemoryDB, Trie}; @@ -111,9 +112,7 @@ pub fn generate_receipt_proofs() -> Vec { rt.block_on(async { // Spin up a local node. 
- let rpc = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|a| Anvil::block_time(a, 1)); + let rpc = ProviderBuilder::new().on_anvil_with_config(|anvil| Anvil::block_time(anvil, 1)); // Deploy the contract using anvil let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); @@ -121,26 +120,12 @@ pub fn generate_receipt_proofs() -> Vec { // Deploy the contract using anvil let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); - let address = rpc.default_signer_address(); - rpc.anvil_set_nonce(address, U256::from(0)).await.unwrap(); let tx_reqs = (0..25) .map(|i| match i % 4 { - 0 => event_contract - .testEmit() - .into_transaction_request() - .nonce(i as u64), - 1 => event_contract - .twoEmits() - .into_transaction_request() - .nonce(i as u64), - 2 => other_contract - .otherEmit() - .into_transaction_request() - .nonce(i as u64), - 3 => other_contract - .twoEmits() - .into_transaction_request() - .nonce(i as u64), + 0 => event_contract.testEmit().into_transaction_request(), + 1 => event_contract.twoEmits().into_transaction_request(), + 2 => other_contract.otherEmit().into_transaction_request(), + 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), }) .collect::>(); @@ -148,8 +133,18 @@ pub fn generate_receipt_proofs() -> Vec { tx_reqs.into_iter().for_each(|tx_req| { let rpc_clone = rpc.clone(); join_set.spawn(async move { + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); rpc_clone - .send_transaction(tx_req) + .anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc_clone + .anvil_auto_impersonate_account(true) + .await + .unwrap(); + rpc_clone + .send_transaction(tx_req.with_from(sender_address)) .await .unwrap() .watch() diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index 79ff29640..9515ea5ef 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs 
@@ -121,11 +121,7 @@ mod test { ); assert_eq!( U256::from_fields(pi.block_number_raw()), -<<<<<<< HEAD - U256::from(block.header.number), -======= U256::from(block.header.number) ->>>>>>> 6072e82 (test with receipts encoding) ); assert_eq!( pi.state_root_raw(), diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index 4950aef20..a21f7fc41 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -6,7 +6,9 @@ use mp2_common::{ }; use plonky2::field::types::Field; -/// Calculate `metadata_digest = D(key_id || value_id || slot)` for receipt leaf. +/// Calculate `metadata_digest = D(address || signature || topics)` for receipt leaf. +/// Topics is an array of 5 values (some are dummies), each being `column_id`, `rel_byte_offset` (from the start of the log) +/// and `len`. pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { let topics_flat = event .topics diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 7a44ed175..e4fc8d5b9 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -19,14 +19,14 @@ use plonky2_ecgfp5::{ }; /// The maximum length of a transaction index in a block in nibbles. -/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 3 bytes to represent. -const MAX_INDEX_NIBBLES: usize = 6; +/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 2 bytes to represent. 
+const MAX_INDEX_NIBBLES: usize = 4; // Contract extraction public Inputs: /// - `H : [8]F` : packed node hash const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; -/// - `K : [6]F` : Length of the transaction index in nibbles +/// - `K : [4]F` : Length of the transaction index in nibbles const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; -/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 6 → 0) +/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 4 → 0) const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; /// - `DV : Digest[F]` : value digest of all rows to extract const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 2cf084122..a1bcaa6a8 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -185,7 +185,7 @@ macro_rules! impl_branch_circuits { } /// generates a proof from the inputs stored in `branch`. Depending on the size of the node, /// and the number of children proofs, it selects the right specialized circuit to generate the proof. 
- pub fn generate_proof( + fn generate_proof( &self, set: &RecursiveCircuits, branch_node: InputNode, diff --git a/rustc-ice-2024-11-04T12_36_50-74186.txt b/rustc-ice-2024-11-04T12_36_50-74186.txt deleted file mode 100644 index d48781bb7..000000000 --- a/rustc-ice-2024-11-04T12_36_50-74186.txt +++ /dev/null @@ -1,63 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x11209ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10ff1b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x1120b9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x1120b9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x1120b6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x1120b8f24 - _rust_begin_unwind - 6: 0x1147a7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x1148ddc1c - >::const_param_out_of_range - 8: 0x110de5ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x110db651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x110daa120 - >::super_fold_with::> - 11: 
0x110cf9f18 - >::super_fold_with::> - 12: 0x110d70d94 - <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x110cf7c2c - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x110cf73b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 15: 0x110df372c - >::try_fold_with::> - 16: 0x110dc5a1c - ::instantiate_into - 17: 0x111cc9848 - ::nominal_obligations - 18: 0x111cc8710 - >::visit_const - 19: 0x111cc7b58 - >::visit_ty - 20: 0x111cc5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 21: 0x111e3b15c - ::process_obligation - 22: 0x111e1c724 - >::process_obligations:: - 23: 0x111e383c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 24: 0x111c66608 - >::assumed_wf_types_and_report_errors - 25: 0x110376c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 26: 0x11160ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 27: 0x1117112e0 - >::call_once - 28: 0x1115abf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 29: 0x111788630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 30: 0x11036a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 31: 0x11037d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 32: 0x11160ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 33: 0x111711048 - >::call_once - 34: 0x11156cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 
35: 0x111775ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 36: 0x11036534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 37: 0x11041513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 38: 0x1108bb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 39: 0x11160e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 40: 0x1116b2cf0 - >::call_once - 41: 0x11152ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 42: 0x1117636ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 43: 0x10ff66ee0 - ::enter::> - 44: 0x10ff34448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 45: 0x10ff81978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 46: 0x10ff7e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 47: 0x10ff7edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 48: 0x1120c3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 49: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack diff --git a/rustc-ice-2024-11-04T12_37_01-74253.txt b/rustc-ice-2024-11-04T12_37_01-74253.txt deleted file mode 100644 index 6bcecf0f7..000000000 --- a/rustc-ice-2024-11-04T12_37_01-74253.txt +++ /dev/null @@ -1,62 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x110a2ec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10e8ab468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x110a49608 - 
std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x110a49260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x110a46e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x110a48f24 - _rust_begin_unwind - 6: 0x113137ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x11326dc1c - >::const_param_out_of_range - 8: 0x10f775ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x10f74651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x10f73a120 - >::super_fold_with::> - 11: 0x10f689f18 - >::super_fold_with::> - 12: 0x10f687ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x10f6873b8 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x10f78372c - >::try_fold_with::> - 15: 0x10f755a1c - ::instantiate_into - 16: 0x110659848 - ::nominal_obligations - 17: 0x110658710 - >::visit_const - 18: 0x110657b58 - >::visit_ty - 19: 0x110655db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 20: 0x1107cb15c - ::process_obligation - 21: 0x1107ac724 - >::process_obligations:: - 22: 0x1107c83c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 23: 0x1105f6608 - >::assumed_wf_types_and_report_errors - 24: 0x10ed06c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 25: 0x10ff9ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 26: 0x1100a12e0 - >::call_once - 27: 0x10ff3bf1c - 
rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 28: 0x110118630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 29: 0x10ecfa5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 30: 0x10ed0d898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 31: 0x10ff9ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 32: 0x1100a1048 - >::call_once - 33: 0x10fefcf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 34: 0x110105ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 35: 0x10ecf534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], ::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 36: 0x10eda513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 37: 0x10f24b918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 38: 0x10ff9e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 39: 0x110042cf0 - >::call_once - 40: 0x10febae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 41: 0x1100f36ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 42: 0x10e8f6ee0 - ::enter::> - 43: 0x10e8c4448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 44: 0x10e911978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, 
rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 45: 0x10e90e0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 46: 0x10e90edb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 47: 0x110a53a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 48: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack diff --git a/rustc-ice-2024-11-04T12_37_13-74307.txt b/rustc-ice-2024-11-04T12_37_13-74307.txt deleted file mode 
100644 index 6eb26635b..000000000 --- a/rustc-ice-2024-11-04T12_37_13-74307.txt +++ /dev/null @@ -1,62 +0,0 @@ -thread 'rustc' panicked at /rustc/3f1be1ec7ec3d8e80beb381ee82164a0aa3ca777/compiler/rustc_type_ir/src/binder.rs:777:9: -const parameter `KEY_LEN_BYTES/#3` (KEY_LEN_BYTES/#3/3) out of range when instantiating args=[DEPTH/#0, NODE_LEN/#1, KEY_LEN/#2] -stack backtrace: - 0: 0x10e1cec0c - std::backtrace::Backtrace::create::hd2b9e24a71fd24ea - 1: 0x10c04b468 - as core[78ac8d9058276e2b]::ops::function::Fn<(&dyn for<'a, 'b> core[78ac8d9058276e2b]::ops::function::Fn<(&'a std[25544cbdc54c9068]::panic::PanicHookInfo<'b>,), Output = ()> + core[78ac8d9058276e2b]::marker::Sync + core[78ac8d9058276e2b]::marker::Send, &std[25544cbdc54c9068]::panic::PanicHookInfo)>>::call - 2: 0x10e1e9608 - std::panicking::rust_panic_with_hook::hbaa3501f6245c05a - 3: 0x10e1e9260 - std::panicking::begin_panic_handler::{{closure}}::hd341aa107154c508 - 4: 0x10e1e6e28 - std::sys::backtrace::__rust_end_short_backtrace::hca058610990f2143 - 5: 0x10e1e8f24 - _rust_begin_unwind - 6: 0x1108d7ee4 - core::panicking::panic_fmt::h81353f1686d3b9a2 - 7: 0x110a0dc1c - >::const_param_out_of_range - 8: 0x10cf15ebc - as rustc_type_ir[47614f3ecd88d1ff]::fold::FallibleTypeFolder>::try_fold_const - 9: 0x10cee651c - rustc_middle[71f41ea3d2538dcd]::ty::util::fold_list::, &rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg>, rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg, <&rustc_middle[71f41ea3d2538dcd]::ty::list::RawList<(), rustc_middle[71f41ea3d2538dcd]::ty::generic_args::GenericArg> as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with>::{closure#0}> - 10: 0x10ceda120 - >::super_fold_with::> - 11: 0x10ce29f18 - >::super_fold_with::> - 12: 0x10ce27ca0 - as rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 13: 0x10ce273b8 - as 
rustc_type_ir[47614f3ecd88d1ff]::fold::TypeFoldable>::try_fold_with::> - 14: 0x10cf2372c - >::try_fold_with::> - 15: 0x10cef5a1c - ::instantiate_into - 16: 0x10ddf9848 - ::nominal_obligations - 17: 0x10ddf8710 - >::visit_const - 18: 0x10ddf7b58 - >::visit_ty - 19: 0x10ddf5db0 - rustc_trait_selection[55a89e4d0d7ea7c6]::traits::wf::obligations - 20: 0x10df6b15c - ::process_obligation - 21: 0x10df4c724 - >::process_obligations:: - 22: 0x10df683c4 - as rustc_infer[3d6a6834044a20c4]::traits::engine::TraitEngine>::select_where_possible - 23: 0x10dd96608 - >::assumed_wf_types_and_report_errors - 24: 0x10c4a6c6c - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_well_formed - 25: 0x10d73ad34 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 26: 0x10d8412e0 - >::call_once - 27: 0x10d6dbf1c - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 28: 0x10d8b8630 - rustc_query_impl[30466c14bdba48]::query_impl::check_well_formed::get_query_incr::__rust_end_short_backtrace - 29: 0x10c49a5ec - rustc_middle[71f41ea3d2538dcd]::query::plumbing::query_ensure_error_guaranteed::>, ()> - 30: 0x10c4ad898 - rustc_hir_analysis[6576f1f28a8b13c4]::check::wfcheck::check_mod_type_wf - 31: 0x10d73ad10 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 32: 0x10d841048 - >::call_once - 33: 0x10d69cf28 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 34: 0x10d8a5ecc - rustc_query_impl[30466c14bdba48]::query_impl::check_mod_type_wf::get_query_incr::__rust_end_short_backtrace - 35: 0x10c49534c - ::run::<(), rustc_data_structures[3bb601c435a2842f]::sync::parallel::enabled::par_for_each_in<&rustc_hir[c448669f75bf36d2]::hir_id::OwnerId, &[rustc_hir[c448669f75bf36d2]::hir_id::OwnerId], 
::par_for_each_module::{closure#0}>::{closure#0}::{closure#1}::{closure#0}> - 36: 0x10c54513c - rustc_hir_analysis[6576f1f28a8b13c4]::check_crate - 37: 0x10c9eb918 - rustc_interface[6b7e568f89869ca2]::passes::analysis - 38: 0x10d73e944 - rustc_query_impl[30466c14bdba48]::plumbing::__rust_begin_short_backtrace::> - 39: 0x10d7e2cf0 - >::call_once - 40: 0x10d65ae34 - rustc_query_system[972cd5053bb6237d]::query::plumbing::try_execute_query::>, false, false, false>, rustc_query_impl[30466c14bdba48]::plumbing::QueryCtxt, true> - 41: 0x10d8936ec - rustc_query_impl[30466c14bdba48]::query_impl::analysis::get_query_incr::__rust_end_short_backtrace - 42: 0x10c096ee0 - ::enter::> - 43: 0x10c064448 - ::enter::, rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 44: 0x10c0b1978 - rustc_span[8c398afceecb6ede]::create_session_globals_then::, rustc_interface[6b7e568f89869ca2]::util::run_in_thread_with_globals, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}::{closure#0}> - 45: 0x10c0ae0b8 - std[25544cbdc54c9068]::sys::backtrace::__rust_begin_short_backtrace::, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>> - 46: 0x10c0aedb8 - <::spawn_unchecked_, rustc_driver_impl[763c4ce7974ba5fb]::run_compiler::{closure#0}>::{closure#1}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), 
rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#0}::{closure#0}, core[78ac8d9058276e2b]::result::Result<(), rustc_span[8c398afceecb6ede]::ErrorGuaranteed>>::{closure#1} as core[78ac8d9058276e2b]::ops::function::FnOnce<()>>::call_once::{shim:vtable#0} - 47: 0x10e1f3a78 - std::sys::pal::unix::thread::Thread::new::thread_start::h9a782c2ee1570786 - 48: 0x18b24ef94 - __pthread_joiner_wake - - -rustc version: 1.84.0-nightly (3f1be1ec7 2024-10-28) -platform: aarch64-apple-darwin - -query stack during panic: -#0 [check_well_formed] checking that `mpt_sequential::` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in module `mpt_sequential` -#2 [analysis] running analysis passes on this crate -end of query stack From 13d3cecbd24366aab0ff17aa0871242a257cb704 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 27 Nov 2024 09:26:31 +0000 Subject: [PATCH 08/15] Value digest computation corrected --- mp2-common/src/eth.rs | 356 ++++++++++-------- mp2-common/src/mpt_sequential/mod.rs | 2 +- mp2-common/src/rlp.rs | 2 +- mp2-test/src/mpt_sequential.rs | 2 +- mp2-v1/src/receipt_extraction/leaf.rs | 109 ++++-- mp2-v1/src/receipt_extraction/mod.rs | 60 ++- .../src/receipt_extraction/public_inputs.rs | 4 +- mp2-v1/tests/common/block_extraction.rs | 2 +- 8 files changed, 338 insertions(+), 199 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 3e8e3fa2d..d5721fbcd 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -7,7 +7,7 @@ use alloy::{ network::{eip2718::Encodable2718, BlockResponse, TransactionResponse}, primitives::{Address, Log, LogData, B256}, providers::{Provider, RootProvider}, - rlp::Encodable as AlloyEncodable, + rlp::{Decodable, Encodable as AlloyEncodable}, rpc::types::{ Block, BlockTransactions, EIP1186AccountProofResponse, Filter, ReceiptEnvelope, Transaction, }, @@ -130,6 +130,8 @@ pub struct ReceiptQuery { pub struct ReceiptProofInfo { /// The MPT proof that this Receipt is in the tree pub 
mpt_proof: Vec>, + /// The root of the Receipt Trie this receipt belongs to + pub mpt_root: H256, /// The index of this transaction in the block pub tx_index: u64, /// The size of the index in bytes @@ -227,7 +229,7 @@ impl TryFrom<&Log> for EventLogInfo { .take(remaining_topics) .for_each(|(j, info)| { *info = LogDataInfo { - column_id: j, + column_id: j + 1, rel_byte_offset: current_topic_offset, len: 32, }; @@ -260,7 +262,7 @@ impl TryFrom<&Log> for EventLogInfo { let chunk_header = chunk_rlp.payload_info()?; if chunk_header.value_len <= 32 { data[j] = LogDataInfo { - column_id: 3 + j, + column_id: remaining_topics + 1 + j, rel_byte_offset: current_topic_offset + additional_offset + chunk_header.header_len, @@ -426,6 +428,23 @@ impl ProofQuery { } } +impl ReceiptProofInfo { + pub fn to_receipt(&self) -> Result { + let memdb = Arc::new(MemoryDB::new(true)); + let tx_trie = EthTrie::new(Arc::clone(&memdb)); + + let mpt_key = self.tx_index.rlp_bytes(); + + let valid = tx_trie + .verify_proof(self.mpt_root, &mpt_key, self.mpt_proof.clone())? + .ok_or(anyhow!("No proof found when verifying"))?; + + let rlp_receipt = rlp::Rlp::new(&valid[1..]); + ReceiptWithBloom::decode(&mut rlp_receipt.as_raw()) + .map_err(|e| anyhow!("Could not decode receipt got: {}", e)) + } +} + impl ReceiptQuery { pub fn new(contract: Address, event: Event) -> Self { Self { contract, event } @@ -462,7 +481,7 @@ impl ReceiptQuery { // Construct the Receipt Trie for this block so we can retrieve MPT proofs. 
let mut block_util = BlockUtil::fetch(provider, block).await?; - + let mpt_root = block_util.receipts_trie.root_hash()?; let proofs = tx_indices .into_iter() .map(|index| { @@ -527,6 +546,7 @@ impl ReceiptQuery { Ok(ReceiptProofInfo { mpt_proof: proof, + mpt_root, tx_index: index, index_size, status_offset, @@ -602,7 +622,7 @@ impl BlockUtil { let mut transactions_trie = EthTrie::new(memdb.clone()); let consensus_receipts = receipts .into_iter() - .zip(all_tx.into_iter()) + .zip(all_tx.iter()) .map(|(receipt, transaction)| { let tx_index = receipt.transaction_index.unwrap().rlp_bytes(); @@ -678,157 +698,6 @@ fn from_rpc_logs_to_consensus( } } -// for compatibility check with alloy -#[cfg(test)] -mod tryethers { - - use std::sync::Arc; - - use anyhow::Result; - use eth_trie::{EthTrie, MemoryDB, Trie}; - use ethers::{ - providers::{Http, Middleware, Provider}, - types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, - }; - use rlp::{Encodable, RlpStream}; - - /// A wrapper around a transaction and its receipt. The receipt is used to filter - /// bad transactions, so we only compute over valid transactions. 
- pub struct TxAndReceipt(Transaction, TransactionReceipt); - - impl TxAndReceipt { - pub fn tx(&self) -> &Transaction { - &self.0 - } - pub fn receipt(&self) -> &TransactionReceipt { - &self.1 - } - pub fn tx_rlp(&self) -> Bytes { - self.0.rlp() - } - // TODO: this should be upstreamed to ethers-rs - pub fn receipt_rlp(&self) -> Bytes { - let tx_type = self.tx().transaction_type; - let mut rlp = RlpStream::new(); - rlp.begin_unbounded_list(); - match &self.1.status { - Some(s) if s.as_u32() == 1 => rlp.append(s), - _ => rlp.append_empty_data(), - }; - rlp.append(&self.1.cumulative_gas_used) - .append(&self.1.logs_bloom) - .append_list(&self.1.logs); - - rlp.finalize_unbounded_list(); - let rlp_bytes: Bytes = rlp.out().freeze().into(); - let mut encoded = vec![]; - match tx_type { - // EIP-2930 (0x01) - Some(x) if x == U64::from(0x1) => { - encoded.extend_from_slice(&[0x1]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - // EIP-1559 (0x02) - Some(x) if x == U64::from(0x2) => { - encoded.extend_from_slice(&[0x2]); - encoded.extend_from_slice(rlp_bytes.as_ref()); - encoded.into() - } - _ => rlp_bytes, - } - } - } - /// Structure containing the block header and its transactions / receipts. Amongst other things, - /// it is used to create a proof of inclusion for any transaction inside this block. - pub struct BlockData { - pub block: ethers::types::Block, - pub txs: Vec, - // TODO: add generics later - this may be re-used amongst different workers - pub tx_trie: EthTrie, - pub receipts_trie: EthTrie, - } - - impl BlockData { - pub async fn fetch + Send + Sync>( - blockid: T, - url: String, - ) -> Result { - let provider = - Provider::::try_from(url).expect("could not instantiate HTTP Provider"); - Self::fetch_from(&provider, blockid).await - } - pub async fn fetch_from + Send + Sync>( - provider: &Provider, - blockid: T, - ) -> Result { - let block = provider - .get_block_with_txs(blockid) - .await? 
- .expect("should have been a block"); - let receipts = provider.get_block_receipts(block.number.unwrap()).await?; - - let tx_with_receipt = block - .transactions - .clone() - .into_iter() - .map(|tx| { - let tx_hash = tx.hash(); - let r = receipts - .iter() - .find(|r| r.transaction_hash == tx_hash) - .expect("RPC sending invalid data"); - // TODO remove cloning - TxAndReceipt(tx, r.clone()) - }) - .collect::>(); - - // check transaction root - let memdb = Arc::new(MemoryDB::new(true)); - let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - tx_trie - .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx().rlp()) - .expect("can't insert tx"); - } - - // check receipt root - let memdb = Arc::new(MemoryDB::new(true)); - let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); - for tr in tx_with_receipt.iter() { - if tr.tx().transaction_index.unwrap() == U64::from(0) { - println!( - "Ethers: Index {} -> {:?}", - tr.tx().transaction_index.unwrap(), - tr.receipt_rlp().to_vec() - ); - } - receipts_trie - .insert( - &tr.receipt().transaction_index.rlp_bytes(), - // TODO: make getter value for rlp encoding - &tr.receipt_rlp(), - ) - .expect("can't insert tx"); - } - let computed = tx_trie.root_hash().expect("root hash problem"); - let expected = block.transactions_root; - assert_eq!(expected, computed); - - let computed = receipts_trie.root_hash().expect("root hash problem"); - let expected = block.receipts_root; - assert_eq!(expected, computed); - - Ok(BlockData { - block, - tx_trie, - receipts_trie, - txs: tx_with_receipt, - }) - } - } -} - #[cfg(test)] mod test { #[cfg(feature = "ci")] @@ -843,7 +712,6 @@ mod test { rlp::Decodable, sol, }; - use alloy_multicall::Multicall; use eth_trie::Nibbles; use ethereum_types::U64; @@ -873,11 +741,12 @@ mod test { // check if we compute the RLP correctly now block.check()?; let mut be = tryethers::BlockData::fetch(bn, url).await?; + be.check()?; let er = be.receipts_trie.root_hash()?; 
let ar = block.receipts_trie.root_hash()?; assert_eq!(er, ar); // dissect one receipt entry in the trie - let tx_receipt = block.txs.first().clone().unwrap(); + let tx_receipt = block.txs.first().unwrap(); // https://sepolia.etherscan.io/tx/0x9bef12fafd3962b0e0d66b738445d6ea2c1f3daabe10c889bd1916acc75d698b#eventlog println!( "Looking at tx hash on sepolia: {}", @@ -946,7 +815,7 @@ mod test { // final is tokenid - not in topic let expected_data = "000000000000000000000000000000000000000000115eec47f6cf7e35000000"; let log_data: Vec = log_state.val_at(2).context("can't decode log data")?; - let found_data = hex::encode(&left_pad32( + let found_data = hex::encode(left_pad32( &log_data.into_iter().take(32).collect::>(), )); assert_eq!(expected_data, found_data); @@ -1023,13 +892,6 @@ mod test { // Deploy the contract using anvil let contract = EventEmitter::deploy(rpc.clone()).await?; - // (0..10).for_each(|j| { - // match i % 2 { - // 0 => multicall.add_call(), - // 1 => contract.twoEmits().into_transaction_request(), - // _ => unreachable!(), - // } - // }); let tx_reqs = (0..10) .map(|i| match i % 2 { 0 => contract.testEmit().into_transaction_request(), @@ -1085,7 +947,7 @@ mod test { .ok_or(anyhow!("Could not get block test"))?; let receipt_hash = block.header().receipts_root; let proofs = receipt_query - .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await?; // Now for each transaction we fetch the block, then get the MPT Trie proof that the receipt is included and verify it @@ -1424,4 +1286,164 @@ mod test { rlp.append(inner); } } + // for compatibility check with alloy + mod tryethers { + + use std::sync::Arc; + + use anyhow::Result; + use eth_trie::{EthTrie, MemoryDB, Trie}; + use ethers::{ + providers::{Http, Middleware, Provider}, + types::{BlockId, Bytes, Transaction, TransactionReceipt, U64}, + }; + use rlp::{Encodable, RlpStream}; + + /// A wrapper 
around a transaction and its receipt. The receipt is used to filter + /// bad transactions, so we only compute over valid transactions. + pub struct TxAndReceipt(Transaction, TransactionReceipt); + + impl TxAndReceipt { + pub fn tx(&self) -> &Transaction { + &self.0 + } + pub fn receipt(&self) -> &TransactionReceipt { + &self.1 + } + pub fn tx_rlp(&self) -> Bytes { + self.0.rlp() + } + // TODO: this should be upstreamed to ethers-rs + pub fn receipt_rlp(&self) -> Bytes { + let tx_type = self.tx().transaction_type; + let mut rlp = RlpStream::new(); + rlp.begin_unbounded_list(); + match &self.1.status { + Some(s) if s.as_u32() == 1 => rlp.append(s), + _ => rlp.append_empty_data(), + }; + rlp.append(&self.1.cumulative_gas_used) + .append(&self.1.logs_bloom) + .append_list(&self.1.logs); + + rlp.finalize_unbounded_list(); + let rlp_bytes: Bytes = rlp.out().freeze().into(); + let mut encoded = vec![]; + match tx_type { + // EIP-2930 (0x01) + Some(x) if x == U64::from(0x1) => { + encoded.extend_from_slice(&[0x1]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + // EIP-1559 (0x02) + Some(x) if x == U64::from(0x2) => { + encoded.extend_from_slice(&[0x2]); + encoded.extend_from_slice(rlp_bytes.as_ref()); + encoded.into() + } + _ => rlp_bytes, + } + } + } + /// Structure containing the block header and its transactions / receipts. Amongst other things, + /// it is used to create a proof of inclusion for any transaction inside this block. 
+ pub struct BlockData { + pub block: ethers::types::Block, + // TODO: add generics later - this may be re-used amongst different workers + pub tx_trie: EthTrie, + pub receipts_trie: EthTrie, + } + + impl BlockData { + pub async fn fetch + Send + Sync>( + blockid: T, + url: String, + ) -> Result { + let provider = + Provider::::try_from(url).expect("could not instantiate HTTP Provider"); + Self::fetch_from(&provider, blockid).await + } + pub async fn fetch_from + Send + Sync>( + provider: &Provider, + blockid: T, + ) -> Result { + let block = provider + .get_block_with_txs(blockid) + .await? + .expect("should have been a block"); + let receipts = provider.get_block_receipts(block.number.unwrap()).await?; + + let tx_with_receipt = block + .transactions + .clone() + .into_iter() + .map(|tx| { + let tx_hash = tx.hash(); + let r = receipts + .iter() + .find(|r| r.transaction_hash == tx_hash) + .expect("RPC sending invalid data"); + // TODO remove cloning + TxAndReceipt(tx, r.clone()) + }) + .collect::>(); + + // check transaction root + let memdb = Arc::new(MemoryDB::new(true)); + let mut tx_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + tx_trie + .insert(&tr.receipt().transaction_index.rlp_bytes(), &tr.tx_rlp()) + .expect("can't insert tx"); + } + + // check receipt root + let memdb = Arc::new(MemoryDB::new(true)); + let mut receipts_trie = EthTrie::new(Arc::clone(&memdb)); + for tr in tx_with_receipt.iter() { + if tr.tx().transaction_index.unwrap() == U64::from(0) { + println!( + "Ethers: Index {} -> {:?}", + tr.tx().transaction_index.unwrap(), + tr.receipt_rlp().to_vec() + ); + } + receipts_trie + .insert( + &tr.receipt().transaction_index.rlp_bytes(), + // TODO: make getter value for rlp encoding + &tr.receipt_rlp(), + ) + .expect("can't insert tx"); + } + let computed = tx_trie.root_hash().expect("root hash problem"); + let expected = block.transactions_root; + assert_eq!(expected, computed); + + let computed = 
receipts_trie.root_hash().expect("root hash problem"); + let expected = block.receipts_root; + assert_eq!(expected, computed); + + Ok(BlockData { + block, + tx_trie, + receipts_trie, + }) + } + + // recompute the receipts trie by first converting all receipts form RPC type to consensus type + // since in Alloy these are two different types and RLP functions are only implemented for + // consensus ones. + pub fn check(&mut self) -> Result<()> { + let computed = self.receipts_trie.root_hash()?; + let tx_computed = self.tx_trie.root_hash()?; + let expected = self.block.receipts_root; + let tx_expected = self.block.transactions_root; + assert_eq!(expected.0, computed.0); + assert_eq!(tx_expected.0, tx_computed.0); + Ok(()) + } + } + } } diff --git a/mp2-common/src/mpt_sequential/mod.rs b/mp2-common/src/mpt_sequential/mod.rs index e4518401a..81e0d6286 100644 --- a/mp2-common/src/mpt_sequential/mod.rs +++ b/mp2-common/src/mpt_sequential/mod.rs @@ -361,7 +361,7 @@ where /// * The key where to lookup the next nibble and thus the hash stored at /// nibble position in the branch node. /// * RLP headers of the current node. -/// And it returns: +/// And it returns: /// * New key with the pointer moved. /// * The child hash / value of the node. /// * A boolean that must be true if the given node is a leaf or an extension. diff --git a/mp2-common/src/rlp.rs b/mp2-common/src/rlp.rs index 3c50eb8cc..01d6824ab 100644 --- a/mp2-common/src/rlp.rs +++ b/mp2-common/src/rlp.rs @@ -16,7 +16,7 @@ const MAX_LEN_BYTES: usize = 2; /// Maximum size a key can have inside a MPT node. /// 33 bytes because key is compacted encoded, so it can add up to 1 byte more. -const MAX_ENC_KEY_LEN: usize = 33; +pub const MAX_ENC_KEY_LEN: usize = 33; /// Simply the maximum number of nibbles a key can have. 
pub const MAX_KEY_NIBBLE_LEN: usize = 64; diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 3ab1346e1..70080429a 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -168,7 +168,7 @@ pub fn generate_receipt_proofs() -> Vec { let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); receipt_query - .query_receipt_proofs(&rpc.root(), BlockNumberOrTag::Number(block_number)) + .query_receipt_proofs(rpc.root(), BlockNumberOrTag::Number(block_number)) .await .unwrap() }) diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/receipt_extraction/leaf.rs index 8fca8a1c5..429f46bd9 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/receipt_extraction/leaf.rs @@ -12,7 +12,7 @@ use mp2_common::{ mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, public_inputs::PublicInputCommon, types::{CBuilder, GFp}, - utils::{Endianness, PackerTarget}, + utils::{less_than, less_than_or_equal_to, Endianness, PackerTarget}, D, F, }; use plonky2::{ @@ -129,17 +129,66 @@ impl EventWires { &self, b: &mut CBuilder, value: &VectorWire, - status_offset: Target, relevant_logs_offsets: &VectorWire, ) -> CurveTarget { let t = b._true(); + let one = b.one(); + let two = b.two(); let zero = b.zero(); let curve_zero = b.curve_zero(); let mut points = Vec::new(); - // Enforce status is true. - let status = value.arr.random_access_large_array(b, status_offset); - b.connect(status, t.target); + // Extract the gas used in the transaction, since the position of this can vary because it is after the key + // we have to prove we extracted from the correct location. 
+ let header_len_len = b.add_const( + value.arr[0], + F::from_canonical_u64(1) - F::from_canonical_u64(247), + ); + // let key_header = value.arr.random_access_large_array(b, header_len_len); + // let key_header_len = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + + // This is the start of the string that is the rlp encoded receipt (a string since the first element is transaction type). + // From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) + // + 1 (for status) + 1 to get the header for the gas used string. + let string_offset = b.add(one, header_len_len); + let string_header = value.arr.random_access_large_array(b, string_offset); + let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); + + let list_offset = b.add_many([string_offset, string_len_len, two]); + let list_header = value.arr.random_access_large_array(b, list_offset); + + let gas_used_offset_lo = b.add_const( + list_header, + F::from_canonical_u64(2) - F::from_canonical_u64(247), + ); + let gas_used_offset = b.add(gas_used_offset_lo, list_offset); + + let gas_used_header = value.arr.random_access_large_array(b, gas_used_offset); + let gas_used_len = b.add_const(gas_used_header, -F::from_canonical_u64(128)); + + let initial_gas_index = b.add(gas_used_offset, one); + let final_gas_index = b.add(gas_used_offset, gas_used_len); + + let combiner = b.constant(F::from_canonical_u64(1 << 8)); + + let gas_used = (0..3u64).fold(zero, |acc, i| { + let access_index = b.add_const(initial_gas_index, F::from_canonical_u64(i)); + let array_value = value.arr.random_access_large_array(b, access_index); + + // If we have extracted a value from an index in the desired range (so lte final_gas_index) we want to add it. 
+ // If access_index was strictly less than final_gas_index we need to multiply by 1 << 8 after (since the encoding is big endian) + let valid = less_than_or_equal_to(b, access_index, final_gas_index, 12); + let need_scalar = less_than(b, access_index, final_gas_index, 12); + + let to_add = b.select(valid, array_value, zero); + + let scalar = b.select(need_scalar, combiner, one); + let tmp = b.add(acc, to_add); + b.mul(tmp, scalar) + }); + + // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. + let gas_digest = b.map_to_curve_point(&[zero, gas_used]); for log_offset in relevant_logs_offsets.arr.arr { // Extract the address bytes @@ -179,13 +228,17 @@ impl EventWires { // For each column we use the `column_id` field to tell if its a dummy or not, zero indicates a dummy. let dummy_column = b.is_equal(log_column.column_id, zero); - let selector = b.and(dummy_column, dummy); - let selected_point = b.select_curve_point(selector, curve_zero, data_digest); + let selected_point = b.select_curve_point(dummy_column, curve_zero, data_digest); + let selected_point = b.select_curve_point(dummy, curve_zero, selected_point); + points.push(selected_point); } - } + let gas_select = b.select_curve_point(dummy, curve_zero, gas_digest); + points.push(gas_select); + } + println!("points length: {}", points.len()); b.add_curve_point(&points) } } @@ -218,13 +271,9 @@ where let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let mut dv = event_wires.verify_logs_and_extract_values::( - b, - &node, - status_offset, - &relevant_logs_offset, - ); - + let mut dv = + event_wires.verify_logs_and_extract_values::(b, &node, &relevant_logs_offset); + println!("dv target: {:?}", dv); let value_id = b.map_to_curve_point(&[index]); dv = b.add_curve_point(&[value_id, dv]); @@ -356,17 +405,16 @@ where wires .address - .assign(pw, &address.0.map(|byte| 
GFp::from_canonical_u8(byte))); + .assign(pw, &address.0.map(GFp::from_canonical_u8)); pw.set_target( wires.add_rel_offset, F::from_canonical_usize(add_rel_offset), ); - wires.event_signature.assign( - pw, - &event_signature.map(|byte| GFp::from_canonical_u8(byte)), - ); + wires + .event_signature + .assign(pw, &event_signature.map(GFp::from_canonical_u8)); pw.set_target( wires.sig_rel_offset, @@ -376,12 +424,12 @@ where wires .topics .iter() - .zip(topics.into_iter()) + .zip(topics) .for_each(|(topic_wire, topic)| topic_wire.assign(pw, topic)); wires .data .iter() - .zip(data.into_iter()) + .zip(data) .for_each(|(data_wire, data)| data_wire.assign(pw, data)); } } @@ -415,7 +463,9 @@ impl CircuitLogicWires for ReceiptLeafWires, wires: &Self::Wires) { - self.c.assign(pw, &wires); + self.c.assign(pw, wires); } } #[test] @@ -450,18 +500,27 @@ mod tests { let receipt_proof_infos = generate_receipt_proofs(); let info = receipt_proof_infos.first().unwrap().clone(); + let c = ReceiptLeafCircuit:: { info: info.clone() }; let test_circuit = TestReceiptLeafCircuit { c }; + let node = info.mpt_proof.last().unwrap().clone(); + let proof = run_circuit::(test_circuit); let pi = PublicInputs::new(&proof.public_inputs); - let node = info.mpt_proof.last().unwrap().clone(); + // Check the output hash { let exp_hash = keccak256(&node).pack(Endianness::Little); assert_eq!(pi.root_hash(), exp_hash); } + // Check value digest + { + let exp_digest = compute_receipt_leaf_value_digest(&info); + assert_eq!(pi.values_digest(), exp_digest.to_weierstrass()); + } + // Check metadata digest { let exp_digest = compute_receipt_leaf_metadata_digest(&info.event_log_info); diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs index a21f7fc41..004a9cfea 100644 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ b/mp2-v1/src/receipt_extraction/mod.rs @@ -1,8 +1,14 @@ pub mod leaf; pub mod public_inputs; +use alloy::{consensus::TxReceipt, primitives::IntoLogData}; + use 
mp2_common::{ - digest::Digest, eth::EventLogInfo, group_hashing::map_to_curve_point, types::GFp, + digest::Digest, + eth::{EventLogInfo, ReceiptProofInfo}, + group_hashing::map_to_curve_point, + types::GFp, + utils::{Packer, ToFields}, }; use plonky2::field::types::Field; @@ -31,3 +37,55 @@ pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { .collect::>(); map_to_curve_point(&data) } + +/// Calculate `value_digest` for receipt leaf. +pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { + let receipt = receipt_proof_info.to_receipt().unwrap(); + let gas_used = receipt.cumulative_gas_used(); + + // Only use events that we are indexing + let address = receipt_proof_info.event_log_info.address; + let sig = receipt_proof_info.event_log_info.event_signature; + + let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); + + let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); + + receipt + .logs() + .iter() + .cloned() + .filter_map(|log| { + let log_address = log.address; + let log_data = log.to_log_data(); + let (topics, data) = log_data.split(); + + if log_address == address && topics[0].0 == sig { + let topics_field = topics + .iter() + .skip(1) + .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) + .collect::>(); + let data_fixed_bytes = data + .chunks(32) + .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) + .take(2) + .collect::>(); + + Some( + topics_field + .iter() + .chain(data_fixed_bytes.iter()) + .enumerate() + .fold(gas_digest, |acc, (i, fixed)| { + let mut values = vec![GFp::from_canonical_usize(i) + GFp::ONE]; + values.extend_from_slice(fixed); + acc + map_to_curve_point(&values) + }), + ) + } else { + None + } + }) + .fold(index_digest, |acc, p| acc + p) +} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs index 
e4fc8d5b9..2916c32bb 100644 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ b/mp2-v1/src/receipt_extraction/public_inputs.rs @@ -46,7 +46,7 @@ pub struct PublicInputArgs<'a> { pub(crate) dm: CurveTarget, } -impl<'a> PublicInputCommon for PublicInputArgs<'a> { +impl PublicInputCommon for PublicInputArgs<'_> { const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; fn register_args(&self, cb: &mut CBuilder) { @@ -61,7 +61,7 @@ impl<'a> PublicInputArgs<'a> { } } -impl<'a> PublicInputArgs<'a> { +impl PublicInputArgs<'_> { pub fn generic_register_args(&self, cb: &mut CBuilder) { self.h.register_as_public_input(cb); self.k.register_as_input(cb); diff --git a/mp2-v1/tests/common/block_extraction.rs b/mp2-v1/tests/common/block_extraction.rs index 933823e56..51b50c5c1 100644 --- a/mp2-v1/tests/common/block_extraction.rs +++ b/mp2-v1/tests/common/block_extraction.rs @@ -1,7 +1,7 @@ use alloy::primitives::U256; use anyhow::Result; use mp2_common::{ - eth::{left_pad_generic, BlockUtil, Rlpable}, + eth::Rlpable, proof::deserialize_proof, utils::{Endianness, Packer, ToFields}, C, D, F, From 64b8c9f689ce979e987b289e489a5d1e6c39d6ba Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Wed, 27 Nov 2024 12:13:39 +0000 Subject: [PATCH 09/15] Moved receipt value extraction location --- mp2-common/src/eth.rs | 4 +- mp2-v1/src/api.rs | 9 +- mp2-v1/src/block_extraction/circuit.rs | 38 +++- mp2-v1/src/block_extraction/mod.rs | 18 +- mp2-v1/src/final_extraction/api.rs | 28 ++- mp2-v1/src/final_extraction/mod.rs | 1 + .../src/final_extraction/receipt_circuit.rs | 213 ++++++++++++++++++ mp2-v1/src/lib.rs | 1 - mp2-v1/src/receipt_extraction/mod.rs | 91 -------- .../src/receipt_extraction/public_inputs.rs | 170 -------------- mp2-v1/src/values_extraction/api.rs | 25 +- .../leaf_receipt.rs} | 76 ++++--- mp2-v1/src/values_extraction/mod.rs | 87 ++++++- mp2-v1/tests/common/context.rs | 17 +- mp2-v1/tests/integrated_tests.rs | 4 +- 15 files changed, 457 
insertions(+), 325 deletions(-) create mode 100644 mp2-v1/src/final_extraction/receipt_circuit.rs delete mode 100644 mp2-v1/src/receipt_extraction/mod.rs delete mode 100644 mp2-v1/src/receipt_extraction/public_inputs.rs rename mp2-v1/src/{receipt_extraction/leaf.rs => values_extraction/leaf_receipt.rs} (88%) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index d5721fbcd..9a36d30a8 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -229,7 +229,7 @@ impl TryFrom<&Log> for EventLogInfo { .take(remaining_topics) .for_each(|(j, info)| { *info = LogDataInfo { - column_id: j + 1, + column_id: j + 2, rel_byte_offset: current_topic_offset, len: 32, }; @@ -262,7 +262,7 @@ impl TryFrom<&Log> for EventLogInfo { let chunk_header = chunk_rlp.payload_info()?; if chunk_header.value_len <= 32 { data[j] = LogDataInfo { - column_id: remaining_topics + 1 + j, + column_id: remaining_topics + 2 + j, rel_byte_offset: current_topic_offset + additional_offset + chunk_header.header_len, diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 22c31accd..7bc957b75 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -79,7 +79,9 @@ impl PublicParameters { /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params() -> PublicParameters { +pub fn build_circuits_params( + extraction_type: block_extraction::ExtractionType, +) -> PublicParameters { log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); @@ -87,7 +89,7 @@ pub fn build_circuits_params() -> PublicParameters { log::info!("Building values_extraction parameters..."); let values_extraction = values_extraction::build_circuits_params(); log::info!("Building block_extraction parameters..."); - let block_extraction = block_extraction::build_circuits_params(); + let block_extraction = 
block_extraction::build_circuits_params(extraction_type); log::info!("Building final_extraction parameters..."); let final_extraction = final_extraction::PublicParameters::build( block_extraction.circuit_data().verifier_data(), @@ -141,6 +143,9 @@ pub fn generate_proof(params: &PublicParameters, input: CircuitInput) -> Result< length_circuit_set, ) } + final_extraction::CircuitInput::Receipt(input) => params + .final_extraction + .generate_receipt_proof(input, value_circuit_set), } } CircuitInput::CellsTree(input) => verifiable_db::api::generate_proof( diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index ceb6df077..4ba2c643d 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -22,6 +22,12 @@ const HEADER_PARENT_HASH_OFFSET: usize = 4; /// State root offset in RLP encoded header. const HEADER_STATE_ROOT_OFFSET: usize = 91; +/// Transaction root offset in RLP encoded header. +const HEADER_TRANSACTION_ROOT_OFFSET: usize = 124; + +/// Receipt root offset in RLP encoded header. +const HEADER_RECEIPT_ROOT_OFFSET: usize = 157; + /// Block number offset in RLP encoded header. const HEADER_BLOCK_NUMBER_OFFSET: usize = 449; /// We define u64 as the maximum block mnumber ever to be reached @@ -50,6 +56,25 @@ pub struct BlockCircuit { pub rlp_headers: Vec, } +/// Enum that represents the extraction type, storage, receipt or transaction +#[derive(Debug, Clone, Serialize, Deserialize, Copy)] +pub enum ExtractionType { + Storage, + Receipt, + Transaction, +} + +impl ExtractionType { + /// This function returns the offset of the relevant root for that type of extraction + pub fn offset(&self) -> usize { + match self { + ExtractionType::Storage => HEADER_STATE_ROOT_OFFSET, + ExtractionType::Receipt => HEADER_RECEIPT_ROOT_OFFSET, + ExtractionType::Transaction => HEADER_TRANSACTION_ROOT_OFFSET, + } + } +} + impl BlockCircuit { /// Creates a new instance of the circuit. 
pub fn new(rlp_headers: Vec) -> Result { @@ -61,7 +86,7 @@ impl BlockCircuit { } /// Build the circuit, assigning the public inputs and returning the internal wires. - pub fn build(cb: &mut CBuilder) -> BlockWires { + pub fn build(cb: &mut CBuilder, extraction_type: ExtractionType) -> BlockWires { // already right padded to right size for keccak let rlp_headers = VectorWire::new(cb); @@ -69,15 +94,16 @@ impl BlockCircuit { rlp_headers.assert_bytes(cb); // extract the previous block hash from the RLP header - let prev_bh = Array::::from_array(create_array(|i| { + let prev_bh: Array = Array::::from_array(create_array(|i| { rlp_headers.arr.arr[HEADER_PARENT_HASH_OFFSET + i] })); let packed_prev_bh = prev_bh.pack(cb, Endianness::Little).downcast_to_targets(); // extract the state root of the block - let state_root = Array::::from_array(create_array(|i| { - rlp_headers.arr.arr[HEADER_STATE_ROOT_OFFSET + i] - })); + let state_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[extraction_type.offset() + i] + })); let state_root_packed = state_root.pack(cb, Endianness::Little); // compute the block hash @@ -200,7 +226,7 @@ mod test { type Wires = BlockWires; fn build(cb: &mut CBuilder) -> Self::Wires { - Self::build(cb) + Self::build(cb, super::ExtractionType::Storage) } fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index 9515ea5ef..af268f2b9 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -15,6 +15,7 @@ use mp2_common::{ }; use serde::{Deserialize, Serialize}; +pub use circuit::ExtractionType; pub use public_inputs::PublicInputs; pub struct CircuitInput(Vec); impl CircuitInput { @@ -31,15 +32,15 @@ pub struct PublicParameters { } /// Returns the parameters necessary to prove block extraction circuits -pub fn build_circuits_params() -> PublicParameters { - PublicParameters::build() +pub fn 
build_circuits_params(extraction_type: ExtractionType) -> PublicParameters { + PublicParameters::build(extraction_type) } impl PublicParameters { - pub fn build() -> Self { + pub fn build(extraction_type: ExtractionType) -> Self { let config = default_config(); let mut cb = CircuitBuilder::new(config); - let wires = circuit::BlockCircuit::build(&mut cb); + let wires = circuit::BlockCircuit::build(&mut cb, extraction_type); let cd = cb.build(); Self { circuit_data: cd, @@ -76,10 +77,13 @@ mod test { }; use mp2_test::eth::get_sepolia_url; - use crate::block_extraction::{public_inputs::PublicInputs, PublicParameters}; + use crate::block_extraction::{ + circuit::ExtractionType, public_inputs::PublicInputs, PublicParameters, + }; + #[tokio::test] - async fn test_api() -> Result<()> { - let params = PublicParameters::build(); + async fn test_api_storage() -> Result<()> { + let params = PublicParameters::build(ExtractionType::Storage); let url = get_sepolia_url(); let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let block_number = BlockNumberOrTag::Latest; diff --git a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index ef152d684..5cb557bb7 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -11,6 +11,7 @@ use super::{ base_circuit::BaseCircuitInput, lengthed_circuit::LengthedRecursiveWires, merge_circuit::{MergeTable, MergeTableRecursiveWires}, + receipt_circuit::{ReceiptCircuitInput, ReceiptCircuitProofInputs, ReceiptRecursiveWires}, simple_circuit::SimpleCircuitRecursiveWires, BaseCircuitProofInputs, LengthedCircuit, MergeCircuit, PublicInputs, SimpleCircuit, }; @@ -20,6 +21,7 @@ pub enum CircuitInput { Simple(SimpleCircuitInput), Lengthed(LengthedCircuitInput), MergeTable(MergeCircuitInput), + Receipt(ReceiptCircuitInput), } #[derive(Clone, Debug)] pub struct FinalExtractionBuilderParams { @@ -51,6 +53,7 @@ pub struct PublicParameters { simple: CircuitWithUniversalVerifier, lengthed: 
CircuitWithUniversalVerifier, merge: CircuitWithUniversalVerifier, + receipt: CircuitWithUniversalVerifier, circuit_set: RecursiveCircuits, } @@ -76,12 +79,14 @@ impl PublicParameters { ); let simple = builder.build_circuit(builder_params.clone()); let lengthed = builder.build_circuit(builder_params.clone()); - let merge = builder.build_circuit(builder_params); + let merge = builder.build_circuit(builder_params.clone()); + let receipt = builder.build_circuit(builder_params); let circuits = vec![ prepare_recursive_circuit_for_circuit_set(&simple), prepare_recursive_circuit_for_circuit_set(&lengthed), prepare_recursive_circuit_for_circuit_set(&merge), + prepare_recursive_circuit_for_circuit_set(&receipt), ]; let circuit_set = RecursiveCircuits::new(circuits); @@ -90,6 +95,7 @@ impl PublicParameters { simple, lengthed, merge, + receipt, circuit_set, } } @@ -160,6 +166,19 @@ impl PublicParameters { ProofWithVK::serialize(&(proof, self.lengthed.circuit_data().verifier_only.clone()).into()) } + pub(crate) fn generate_receipt_proof( + &self, + input: ReceiptCircuitInput, + value_circuit_set: &RecursiveCircuits, + ) -> Result> { + let receipt_input = + ReceiptCircuitProofInputs::new_from_proofs(input, value_circuit_set.clone()); + let proof = self + .circuit_set + .generate_proof(&self.receipt, [], [], receipt_input)?; + ProofWithVK::serialize(&(proof, self.receipt.circuit_data().verifier_only.clone()).into()) + } + pub(crate) fn get_circuit_set(&self) -> &RecursiveCircuits { &self.circuit_set } @@ -230,6 +249,13 @@ impl CircuitInput { let length_proof = ProofWithVK::deserialize(&length_proof)?; Ok(Self::Lengthed(LengthedCircuitInput { base, length_proof })) } + + pub fn new_receipt_input(block_proof: Vec, value_proof: Vec) -> Result { + Ok(Self::Receipt(ReceiptCircuitInput::new( + block_proof, + value_proof, + )?)) + } } #[cfg(test)] diff --git a/mp2-v1/src/final_extraction/mod.rs b/mp2-v1/src/final_extraction/mod.rs index cb6e1c6a4..3d78f3af6 100644 --- 
a/mp2-v1/src/final_extraction/mod.rs +++ b/mp2-v1/src/final_extraction/mod.rs @@ -3,6 +3,7 @@ mod base_circuit; mod lengthed_circuit; mod merge_circuit; mod public_inputs; +mod receipt_circuit; mod simple_circuit; pub use api::{CircuitInput, PublicParameters}; diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs new file mode 100644 index 000000000..ef536ef83 --- /dev/null +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -0,0 +1,213 @@ +use mp2_common::{ + default_config, + keccak::{OutputHash, PACKED_HASH_LEN}, + proof::{deserialize_proof, verify_proof_fixed_circuit, ProofWithVK}, + serialization::{deserialize, serialize}, + u256::UInt256Target, + utils::FromTargets, + C, D, F, +}; +use plonky2::{ + field::{goldilocks_field::GoldilocksField, types::Field}, + iop::{ + target::Target, + witness::{PartialWitness, WitnessWrite}, + }, + plonk::{ + circuit_builder::CircuitBuilder, + proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}, + }, +}; +use plonky2_ecgfp5::gadgets::curve::CurveTarget; +use recursion_framework::{ + circuit_builder::CircuitLogicWires, + framework::{ + RecursiveCircuits, RecursiveCircuitsVerifierGagdet, RecursiveCircuitsVerifierTarget, + }, +}; +use serde::{Deserialize, Serialize}; + +use crate::{block_extraction, values_extraction}; + +use super::api::{FinalExtractionBuilderParams, NUM_IO}; + +use anyhow::Result; + +/// This circuit is more like a gadget. This contains the logic of the common part +/// between all the final extraction circuits. It should not be used on its own. 
+#[derive(Debug, Clone, Copy)] +pub struct ReceiptExtractionCircuit; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReceiptExtractionWires { + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) dm: CurveTarget, + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + pub(crate) dv: CurveTarget, + pub(crate) bh: [Target; PACKED_HASH_LEN], + pub(crate) prev_bh: [Target; PACKED_HASH_LEN], + pub(crate) bn: UInt256Target, +} + +impl ReceiptExtractionCircuit { + pub(crate) fn build( + b: &mut CircuitBuilder, + block_pi: &[Target], + value_pi: &[Target], + ) -> ReceiptExtractionWires { + // TODO: homogeinize the public inputs structs + let block_pi = + block_extraction::public_inputs::PublicInputs::::from_slice(block_pi); + let value_pi = values_extraction::PublicInputs::::new(value_pi); + + let minus_one = b.constant(GoldilocksField::NEG_ONE); + + // enforce the MPT key extraction reached the root + b.connect(value_pi.mpt_key().pointer, minus_one); + + // enforce block_pi.state_root == contract_pi.state_root + block_pi + .state_root() + .enforce_equal(b, &OutputHash::from_targets(value_pi.root_hash_info())); + ReceiptExtractionWires { + dm: value_pi.metadata_digest_target(), + dv: value_pi.values_digest_target(), + bh: block_pi.block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length + prev_bh: block_pi.prev_block_hash_raw().try_into().unwrap(), // safe to unwrap as we give as input the slice of the expected length + bn: block_pi.block_number(), + } + } +} + +/// The wires that are needed for the recursive framework, that concerns verifying the input +/// proofs +#[derive(Serialize, Deserialize, Clone, Debug)] +pub(crate) struct ReceiptRecursiveWires { + /// Wires containing the block and value proof + verification: ReceiptCircuitProofWires, + /// Wires information to check that the value corresponds to the block + consistency: 
ReceiptExtractionWires, +} + +impl CircuitLogicWires for ReceiptRecursiveWires { + type CircuitBuilderParams = FinalExtractionBuilderParams; + + type Inputs = ReceiptCircuitProofInputs; + + const NUM_PUBLIC_INPUTS: usize = NUM_IO; + + fn circuit_logic( + builder: &mut CircuitBuilder, + _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget; 0], + builder_parameters: Self::CircuitBuilderParams, + ) -> Self { + // value proof for table a and value proof for table b = 2 + let verification = ReceiptCircuitProofInputs::build(builder, &builder_parameters); + let consistency = ReceiptExtractionCircuit::build( + builder, + verification.get_block_public_inputs(), + verification.get_value_public_inputs(), + ); + Self { + verification, + consistency, + } + } + + fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness) -> anyhow::Result<()> { + inputs.assign_proof_targets(pw, &self.verification)?; + Ok(()) + } +} + +/// This parameter struct is not intended to be built on its own +/// but rather as a sub-component of the two final extraction parameters set. +/// This parameter contains the common logic of verifying a block and +/// value proof automatically from the right verification keys / circuit set. 
+#[derive(Serialize, Deserialize, Debug, Clone)] +pub(crate) struct ReceiptCircuitProofWires { + /// single circuit proof extracting block hash, block number, previous hash + /// and receipt root + #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] + block_proof: ProofWithPublicInputsTarget, + /// circuit set extracting the values from receipt trie of the block + value_proof: RecursiveCircuitsVerifierTarget, +} + +pub(crate) const VALUE_SET_NUM_IO: usize = values_extraction::PublicInputs::::TOTAL_LEN; + +#[derive(Clone, Debug)] +pub struct ReceiptCircuitInput { + block_proof: ProofWithPublicInputs, + value_proof: ProofWithVK, +} + +impl ReceiptCircuitInput { + pub(super) fn new(block_proof: Vec, value_proof: Vec) -> Result { + Ok(Self { + block_proof: deserialize_proof(&block_proof)?, + value_proof: ProofWithVK::deserialize(&value_proof)?, + }) + } +} +#[derive(Clone, Debug)] +pub(crate) struct ReceiptCircuitProofInputs { + proofs: ReceiptCircuitInput, + value_circuit_set: RecursiveCircuits, +} + +impl ReceiptCircuitProofInputs { + pub(crate) fn new_from_proofs( + proofs: ReceiptCircuitInput, + value_circuit_set: RecursiveCircuits, + ) -> Self { + Self { + proofs, + value_circuit_set, + } + } + + pub(crate) fn build( + cb: &mut CircuitBuilder, + params: &FinalExtractionBuilderParams, + ) -> ReceiptCircuitProofWires { + let config = default_config(); + let value_proof_wires = RecursiveCircuitsVerifierGagdet::::new( + config.clone(), + ¶ms.value_circuit_set, + ) + .verify_proof_in_circuit_set(cb); + + let block_proof_wires = verify_proof_fixed_circuit(cb, ¶ms.block_vk); + ReceiptCircuitProofWires { + block_proof: block_proof_wires, + value_proof: value_proof_wires, + } + } + + pub(crate) fn assign_proof_targets( + &self, + pw: &mut PartialWitness, + wires: &ReceiptCircuitProofWires, + ) -> anyhow::Result<()> { + pw.set_proof_with_pis_target(&wires.block_proof, &self.proofs.block_proof); + + let (proof, vd) = (&self.proofs.value_proof).into(); + 
wires + .value_proof + .set_target(pw, &self.value_circuit_set, proof, vd)?; + + Ok(()) + } +} + +impl ReceiptCircuitProofWires { + pub(crate) fn get_block_public_inputs(&self) -> &[Target] { + self.block_proof.public_inputs.as_slice() + } + + pub(crate) fn get_value_public_inputs(&self) -> &[Target] { + self.value_proof + .get_public_input_targets::() + } +} diff --git a/mp2-v1/src/lib.rs b/mp2-v1/src/lib.rs index 1db586f80..1b1397c28 100644 --- a/mp2-v1/src/lib.rs +++ b/mp2-v1/src/lib.rs @@ -26,5 +26,4 @@ pub mod final_extraction; pub mod indexing; pub mod length_extraction; pub mod query; -pub mod receipt_extraction; pub mod values_extraction; diff --git a/mp2-v1/src/receipt_extraction/mod.rs b/mp2-v1/src/receipt_extraction/mod.rs deleted file mode 100644 index 004a9cfea..000000000 --- a/mp2-v1/src/receipt_extraction/mod.rs +++ /dev/null @@ -1,91 +0,0 @@ -pub mod leaf; -pub mod public_inputs; - -use alloy::{consensus::TxReceipt, primitives::IntoLogData}; - -use mp2_common::{ - digest::Digest, - eth::{EventLogInfo, ReceiptProofInfo}, - group_hashing::map_to_curve_point, - types::GFp, - utils::{Packer, ToFields}, -}; -use plonky2::field::types::Field; - -/// Calculate `metadata_digest = D(address || signature || topics)` for receipt leaf. -/// Topics is an array of 5 values (some are dummies), each being `column_id`, `rel_byte_offset` (from the start of the log) -/// and `len`. 
-pub fn compute_receipt_leaf_metadata_digest(event: &EventLogInfo) -> Digest { - let topics_flat = event - .topics - .iter() - .chain(event.data.iter()) - .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) - .collect::>(); - - let mut out = Vec::new(); - out.push(event.size); - out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); - out.push(event.add_rel_offset); - out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); - out.push(event.sig_rel_offset); - out.extend_from_slice(&topics_flat); - - let data = out - .into_iter() - .map(GFp::from_canonical_usize) - .collect::>(); - map_to_curve_point(&data) -} - -/// Calculate `value_digest` for receipt leaf. -pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { - let receipt = receipt_proof_info.to_receipt().unwrap(); - let gas_used = receipt.cumulative_gas_used(); - - // Only use events that we are indexing - let address = receipt_proof_info.event_log_info.address; - let sig = receipt_proof_info.event_log_info.event_signature; - - let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); - - let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); - - receipt - .logs() - .iter() - .cloned() - .filter_map(|log| { - let log_address = log.address; - let log_data = log.to_log_data(); - let (topics, data) = log_data.split(); - - if log_address == address && topics[0].0 == sig { - let topics_field = topics - .iter() - .skip(1) - .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) - .collect::>(); - let data_fixed_bytes = data - .chunks(32) - .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) - .take(2) - .collect::>(); - - Some( - topics_field - .iter() - .chain(data_fixed_bytes.iter()) - .enumerate() - .fold(gas_digest, |acc, (i, fixed)| { - let mut values = vec![GFp::from_canonical_usize(i) + GFp::ONE]; - 
values.extend_from_slice(fixed); - acc + map_to_curve_point(&values) - }), - ) - } else { - None - } - }) - .fold(index_digest, |acc, p| acc + p) -} diff --git a/mp2-v1/src/receipt_extraction/public_inputs.rs b/mp2-v1/src/receipt_extraction/public_inputs.rs deleted file mode 100644 index 2916c32bb..000000000 --- a/mp2-v1/src/receipt_extraction/public_inputs.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! Public inputs for Receipt Extraction circuits - -use mp2_common::{ - array::Array, - keccak::{OutputHash, PACKED_HASH_LEN}, - mpt_sequential::ReceiptKeyWire, - public_inputs::{PublicInputCommon, PublicInputRange}, - types::{CBuilder, GFp, GFp5, CURVE_TARGET_LEN}, - utils::{convert_point_to_curve_target, convert_slice_to_curve_point, FromTargets}, -}; - -use plonky2::{ - field::{extension::FieldExtension, types::Field}, - iop::target::Target, -}; -use plonky2_ecgfp5::{ - curve::curve::WeierstrassPoint, - gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, -}; - -/// The maximum length of a transaction index in a block in nibbles. -/// Theoretically a block can have up to 1428 transactions in Ethereum, which takes 2 bytes to represent. 
-const MAX_INDEX_NIBBLES: usize = 4; -// Contract extraction public Inputs: -/// - `H : [8]F` : packed node hash -const H_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; -/// - `K : [4]F` : Length of the transaction index in nibbles -const K_RANGE: PublicInputRange = H_RANGE.end..H_RANGE.end + MAX_INDEX_NIBBLES; -/// `T : F` pointer in the MPT indicating portion of the key already traversed (from 4 → 0) -const T_RANGE: PublicInputRange = K_RANGE.end..K_RANGE.end + 1; -/// - `DV : Digest[F]` : value digest of all rows to extract -const DV_RANGE: PublicInputRange = T_RANGE.end..T_RANGE.end + CURVE_TARGET_LEN; -/// - `DM : Digest[F]` : metadata digest to extract -const DM_RANGE: PublicInputRange = DV_RANGE.end..DV_RANGE.end + CURVE_TARGET_LEN; - -/// Public inputs for contract extraction -#[derive(Clone, Debug)] -pub struct PublicInputArgs<'a> { - /// The hash of the node - pub(crate) h: &'a OutputHash, - /// The MPT key - pub(crate) k: &'a ReceiptKeyWire, - /// Digest of the values - pub(crate) dv: CurveTarget, - /// The poseidon hash of the metadata - pub(crate) dm: CurveTarget, -} - -impl PublicInputCommon for PublicInputArgs<'_> { - const RANGES: &'static [PublicInputRange] = &[H_RANGE, K_RANGE, T_RANGE, DV_RANGE, DM_RANGE]; - - fn register_args(&self, cb: &mut CBuilder) { - self.generic_register_args(cb) - } -} - -impl<'a> PublicInputArgs<'a> { - /// Create a new public inputs. 
- pub fn new(h: &'a OutputHash, k: &'a ReceiptKeyWire, dv: CurveTarget, dm: CurveTarget) -> Self { - Self { h, k, dv, dm } - } -} - -impl PublicInputArgs<'_> { - pub fn generic_register_args(&self, cb: &mut CBuilder) { - self.h.register_as_public_input(cb); - self.k.register_as_input(cb); - cb.register_curve_public_input(self.dv); - cb.register_curve_public_input(self.dm); - } - - pub fn digest_value(&self) -> CurveTarget { - self.dv - } - - pub fn digest_metadata(&self) -> CurveTarget { - self.dm - } -} - -/// Public inputs wrapper of any proof generated in this module -#[derive(Clone, Debug)] -pub struct PublicInputs<'a, T> { - pub(crate) proof_inputs: &'a [T], -} - -impl PublicInputs<'_, Target> { - /// Get the merkle hash of the subtree this proof has processed. - pub fn root_hash_target(&self) -> OutputHash { - OutputHash::from_targets(self.root_hash_info()) - } - - /// Get the MPT key defined over the public inputs. - pub fn mpt_key(&self) -> ReceiptKeyWire { - let (key, ptr) = self.mpt_key_info(); - ReceiptKeyWire { - key: Array { - arr: std::array::from_fn(|i| key[i]), - }, - pointer: ptr, - } - } - - /// Get the values digest defined over the public inputs. - pub fn values_digest_target(&self) -> CurveTarget { - convert_point_to_curve_target(self.values_digest_info()) - } - - /// Get the metadata digest defined over the public inputs. - pub fn metadata_digest_target(&self) -> CurveTarget { - convert_point_to_curve_target(self.metadata_digest_info()) - } -} - -impl PublicInputs<'_, GFp> { - /// Get the merkle hash of the subtree this proof has processed. - pub fn root_hash(&self) -> Vec { - let hash = self.root_hash_info(); - hash.iter().map(|t| t.0 as u32).collect() - } - - /// Get the values digest defined over the public inputs. 
- pub fn values_digest(&self) -> WeierstrassPoint { - let (x, y, is_inf) = self.values_digest_info(); - - WeierstrassPoint { - x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), - y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), - is_inf: is_inf.is_nonzero(), - } - } - - /// Get the metadata digest defined over the public inputs. - pub fn metadata_digest(&self) -> WeierstrassPoint { - let (x, y, is_inf) = self.metadata_digest_info(); - - WeierstrassPoint { - x: GFp5::from_basefield_array(std::array::from_fn::(|i| x[i])), - y: GFp5::from_basefield_array(std::array::from_fn::(|i| y[i])), - is_inf: is_inf.is_nonzero(), - } - } -} - -impl<'a, T: Copy> PublicInputs<'a, T> { - pub(crate) const TOTAL_LEN: usize = DM_RANGE.end; - - pub fn new(proof_inputs: &'a [T]) -> Self { - Self { proof_inputs } - } - - pub fn root_hash_info(&self) -> &[T] { - &self.proof_inputs[H_RANGE] - } - - pub fn mpt_key_info(&self) -> (&[T], T) { - let key = &self.proof_inputs[K_RANGE]; - let ptr = self.proof_inputs[T_RANGE.start]; - - (key, ptr) - } - - pub fn values_digest_info(&self) -> ([T; 5], [T; 5], T) { - convert_slice_to_curve_point(&self.proof_inputs[DV_RANGE]) - } - - pub fn metadata_digest_info(&self) -> ([T; 5], [T; 5], T) { - convert_slice_to_curve_point(&self.proof_inputs[DM_RANGE]) - } -} diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index a1bcaa6a8..2eedd5fe4 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -4,14 +4,16 @@ use super::{ branch::{BranchCircuit, BranchWires}, extension::{ExtensionNodeCircuit, ExtensionNodeWires}, leaf_mapping::{LeafMappingCircuit, LeafMappingWires}, + leaf_receipt::{ReceiptLeafCircuit, ReceiptLeafWires}, leaf_single::{LeafSingleCircuit, LeafSingleWires}, public_inputs::PublicInputs, }; -use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_LEAF_NODE_LEN}; +use crate::{api::InputNode, MAX_BRANCH_NODE_LEN, MAX_LEAF_NODE_LEN, 
MAX_RECEIPT_LEAF_NODE_LEN}; use anyhow::{bail, ensure, Result}; use log::debug; use mp2_common::{ default_config, + eth::ReceiptProofInfo, mpt_sequential::PAD_LEN, proof::{ProofInputSerialized, ProofWithVK}, storage_key::{MappingSlot, SimpleSlot}, @@ -32,6 +34,7 @@ use std::array; type LeafSingleWire = LeafSingleWires; type LeafMappingWire = LeafMappingWires; +type LeafReceiptWire = ReceiptLeafWires; type ExtensionInput = ProofInputSerialized; type BranchInput = ProofInputSerialized; const NUM_IO: usize = PublicInputs::::TOTAL_LEN; @@ -42,6 +45,7 @@ const NUM_IO: usize = PublicInputs::::TOTAL_LEN; pub enum CircuitInput { LeafSingle(LeafSingleCircuit), LeafMapping(LeafMappingCircuit), + LeafReceipt(ReceiptLeafCircuit), Extension(ExtensionInput), BranchSingle(BranchInput), BranchMapping(BranchInput), @@ -73,6 +77,11 @@ impl CircuitInput { }) } + /// Create a circuit input for proving a leaf MPT node of a transaction receipt. + pub fn new_receipt_leaf(info: ReceiptProofInfo) -> Self { + CircuitInput::LeafReceipt(ReceiptLeafCircuit { info }) + } + /// Create a circuit input for proving an extension MPT node. pub fn new_extension(node: Vec, child_proof: Vec) -> Self { CircuitInput::Extension(ExtensionInput { @@ -106,6 +115,7 @@ impl CircuitInput { pub struct PublicParameters { leaf_single: CircuitWithUniversalVerifier, leaf_mapping: CircuitWithUniversalVerifier, + leaf_receipt: CircuitWithUniversalVerifier, extension: CircuitWithUniversalVerifier, #[cfg(not(test))] branches: BranchCircuits, @@ -285,8 +295,8 @@ impl_branch_circuits!(BranchCircuits, 2, 9, 16); impl_branch_circuits!(TestBranchCircuits, 1, 4, 9); /// Number of circuits in the set -/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping -const MAPPING_CIRCUIT_SET_SIZE: usize = 6; +/// 3 branch circuits + 1 extension + 1 leaf single + 1 leaf mapping + 1 leaf receipt +const MAPPING_CIRCUIT_SET_SIZE: usize = 7; impl PublicParameters { /// Generates the circuit parameters for the MPT circuits. 
@@ -311,6 +321,10 @@ impl PublicParameters { let leaf_mapping = circuit_builder.build_circuit::>(()); + debug!("Building leaf receipt circuit"); + let leaf_receipt = + circuit_builder.build_circuit::>(()); + debug!("Building extension circuit"); let extension = circuit_builder.build_circuit::(()); @@ -323,6 +337,7 @@ impl PublicParameters { let mut circuits_set = vec![ leaf_single.get_verifier_data().circuit_digest, leaf_mapping.get_verifier_data().circuit_digest, + leaf_receipt.get_verifier_data().circuit_digest, extension.get_verifier_data().circuit_digest, ]; circuits_set.extend(branches.circuit_set()); @@ -331,6 +346,7 @@ impl PublicParameters { PublicParameters { leaf_single, leaf_mapping, + leaf_receipt, extension, branches, #[cfg(not(test))] @@ -349,6 +365,9 @@ impl PublicParameters { CircuitInput::LeafMapping(leaf) => set .generate_proof(&self.leaf_mapping, [], [], leaf) .map(|p| (p, self.leaf_mapping.get_verifier_data().clone()).into()), + CircuitInput::LeafReceipt(leaf) => set + .generate_proof(&self.leaf_receipt, [], [], leaf) + .map(|p| (p, self.leaf_receipt.get_verifier_data().clone()).into()), CircuitInput::Extension(ext) => { let mut child_proofs = ext.get_child_proofs()?; diff --git a/mp2-v1/src/receipt_extraction/leaf.rs b/mp2-v1/src/values_extraction/leaf_receipt.rs similarity index 88% rename from mp2-v1/src/receipt_extraction/leaf.rs rename to mp2-v1/src/values_extraction/leaf_receipt.rs index 429f46bd9..3e8926773 100644 --- a/mp2-v1/src/receipt_extraction/leaf.rs +++ b/mp2-v1/src/values_extraction/leaf_receipt.rs @@ -2,15 +2,16 @@ use crate::MAX_RECEIPT_LEAF_NODE_LEN; -use super::public_inputs::{PublicInputArgs, PublicInputs}; +use super::public_inputs::{PublicInputs, PublicInputsArgs}; use mp2_common::{ array::{Array, Vector, VectorWire}, eth::{EventLogInfo, LogDataInfo, ReceiptProofInfo}, group_hashing::CircuitBuilderGroupHashing, keccak::{InputData, KeccakCircuit, KeccakWires}, - mpt_sequential::{MPTReceiptLeafNode, ReceiptKeyWire, 
MAX_TX_KEY_NIBBLE_LEN, PAD_LEN}, + mpt_sequential::{MPTKeyWire, MPTReceiptLeafNode, PAD_LEN}, public_inputs::PublicInputCommon, + rlp::MAX_KEY_NIBBLE_LEN, types::{CBuilder, GFp}, utils::{less_than, less_than_or_equal_to, Endianness, PackerTarget}, D, F, @@ -29,7 +30,7 @@ use plonky2_ecgfp5::gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}; use recursion_framework::circuit_builder::CircuitLogicWires; use rlp::Encodable; use serde::{Deserialize, Serialize}; -use std::array::from_fn; +use std::{array::from_fn, iter}; /// Maximum number of logs per transaction we can process const MAX_LOGS_PER_TX: usize = 2; @@ -51,7 +52,7 @@ where /// The offsets of the relevant logs inside the node pub relevant_logs_offset: VectorWire, /// The key in the MPT Trie - pub mpt_key: ReceiptKeyWire, + pub mpt_key: MPTKeyWire, } /// Contains all the information for an [`Event`] in rlp form @@ -85,7 +86,7 @@ pub struct LogColumn { impl LogColumn { /// Convert to an array for metadata digest - pub fn to_array(&self) -> [Target; 3] { + pub fn to_array(self) -> [Target; 3] { [self.column_id, self.rel_byte_offset, self.len] } @@ -130,7 +131,7 @@ impl EventWires { b: &mut CBuilder, value: &VectorWire, relevant_logs_offsets: &VectorWire, - ) -> CurveTarget { + ) -> (Target, CurveTarget) { let t = b._true(); let one = b.one(); let two = b.two(); @@ -144,13 +145,16 @@ impl EventWires { value.arr[0], F::from_canonical_u64(1) - F::from_canonical_u64(247), ); - // let key_header = value.arr.random_access_large_array(b, header_len_len); - // let key_header_len = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + let key_header = value.arr.random_access_large_array(b, header_len_len); + let less_than_val = b.constant(F::from_canonical_u8(128)); + let single_value = less_than(b, key_header, less_than_val, 8); + let key_len_maybe = b.add_const(key_header, F::ONE - F::from_canonical_u64(128)); + let key_len = b.select(single_value, one, key_len_maybe); // This is the start of the string that 
is the rlp encoded receipt (a string since the first element is transaction type). // From here we subtract 183 to get the length of the length, then the encoded gas used is at length of length + 1 (for tx type) + (1 + list length) // + 1 (for status) + 1 to get the header for the gas used string. - let string_offset = b.add(one, header_len_len); + let string_offset = b.add(key_len, header_len_len); let string_header = value.arr.random_access_large_array(b, string_offset); let string_len_len = b.add_const(string_header, -F::from_canonical_u64(183)); @@ -190,7 +194,9 @@ impl EventWires { // Map the gas used to a curve point for the value digest, gas used is the first column so use one as its column id. let gas_digest = b.map_to_curve_point(&[zero, gas_used]); - for log_offset in relevant_logs_offsets.arr.arr { + // We also keep track of the number of real logs we process as each log forms a row in our table + let mut n = zero; + for (index, log_offset) in relevant_logs_offsets.arr.arr.into_iter().enumerate() { // Extract the address bytes let address_start = b.add(log_offset, self.add_rel_offset); @@ -234,12 +240,22 @@ impl EventWires { points.push(selected_point); } - + // If this is a real row we record the gas used in the transaction let gas_select = b.select_curve_point(dummy, curve_zero, gas_digest); points.push(gas_select); + + // We also keep track of which log this is in the receipt to avoid having identical rows in the table in the case + // that the event we are tracking can be emitted multiple times in the same transaction but has no topics or data. 
+ let log_number = b.constant(F::from_canonical_usize(index + 1)); + let log_no_digest = b.map_to_curve_point(&[one, log_number]); + let log_no_select = b.select_curve_point(dummy, curve_zero, log_no_digest); + points.push(log_no_select); + + let increment = b.select(dummy, zero, one); + n = b.add(n, increment); } - println!("points length: {}", points.len()); - b.add_curve_point(&points) + + (n, b.add_curve_point(&points)) } } @@ -262,7 +278,7 @@ where let status_offset = b.add_virtual_target(); let relevant_logs_offset = VectorWire::::new(b); - let mpt_key = ReceiptKeyWire::new(b); + let mpt_key = MPTKeyWire::new(b); // Build the node wires. let wires = MPTReceiptLeafNode::build_and_advance_key::<_, D, NODE_LEN>(b, &mpt_key); @@ -271,9 +287,9 @@ where let root = wires.root; // For each relevant log in the transaction we have to verify it lines up with the event we are monitoring for - let mut dv = + let (n, mut dv) = event_wires.verify_logs_and_extract_values::(b, &node, &relevant_logs_offset); - println!("dv target: {:?}", dv); + let value_id = b.map_to_curve_point(&[index]); dv = b.add_curve_point(&[value_id, dv]); @@ -281,11 +297,12 @@ where let dm = b.map_to_curve_point(&event_wires.to_vec()); // Register the public inputs - PublicInputArgs { + PublicInputsArgs { h: &root.output_array, k: &wires.key, dv, dm, + n, } .register_args(b); @@ -372,20 +389,14 @@ where wires.relevant_logs_offset.assign(pw, &relevant_logs_vector); let key_encoded = self.info.tx_index.rlp_bytes(); - let nibbles = key_encoded + let key_nibbles: [u8; MAX_KEY_NIBBLE_LEN] = key_encoded .iter() .flat_map(|byte| [byte / 16, byte % 16]) - .collect::>(); - - let mut key_nibbles = [0u8; MAX_TX_KEY_NIBBLE_LEN]; - key_nibbles - .iter_mut() - .enumerate() - .for_each(|(index, nibble)| { - if index < nibbles.len() { - *nibble = nibbles[index] - } - }); + .chain(iter::repeat(0u8)) + .take(64) + .collect::>() + .try_into() + .expect("Couldn't create mpt key with correct length"); 
wires.mpt_key.assign(pw, &key_nibbles, self.info.index_size); } @@ -462,10 +473,11 @@ impl CircuitLogicWires for ReceiptLeafWires Digest { + let topics_flat = event + .topics + .iter() + .chain(event.data.iter()) + .flat_map(|t| [t.column_id, t.rel_byte_offset, t.len]) + .collect::>(); + + let mut out = Vec::new(); + out.push(event.size); + out.extend_from_slice(&event.address.0.map(|byte| byte as usize)); + out.push(event.add_rel_offset); + out.extend_from_slice(&event.event_signature.map(|byte| byte as usize)); + out.push(event.sig_rel_offset); + out.extend_from_slice(&topics_flat); + + let data = out + .into_iter() + .map(GFp::from_canonical_usize) + .collect::>(); + map_to_curve_point(&data) +} + +/// Calculate `value_digest` for receipt leaf. +pub fn compute_receipt_leaf_value_digest(receipt_proof_info: &ReceiptProofInfo) -> Digest { + let receipt = receipt_proof_info.to_receipt().unwrap(); + let gas_used = receipt.cumulative_gas_used(); + + // Only use events that we are indexing + let address = receipt_proof_info.event_log_info.address; + let sig = receipt_proof_info.event_log_info.event_signature; + + let index_digest = map_to_curve_point(&[GFp::from_canonical_u64(receipt_proof_info.tx_index)]); + + let gas_digest = map_to_curve_point(&[GFp::ZERO, GFp::from_noncanonical_u128(gas_used)]); + let mut n = 0; + receipt + .logs() + .iter() + .cloned() + .filter_map(|log| { + let log_address = log.address; + let log_data = log.to_log_data(); + let (topics, data) = log_data.split(); + + if log_address == address && topics[0].0 == sig { + n += 1; + let topics_field = topics + .iter() + .skip(1) + .map(|fixed| fixed.0.pack(mp2_common::utils::Endianness::Big).to_fields()) + .collect::>(); + let data_fixed_bytes = data + .chunks(32) + .map(|chunk| chunk.pack(mp2_common::utils::Endianness::Big).to_fields()) + .take(2) + .collect::>(); + let log_no_digest = map_to_curve_point(&[GFp::ONE, GFp::from_canonical_usize(n)]); + let initial_digest = gas_digest + log_no_digest; + 
Some( + topics_field + .iter() + .chain(data_fixed_bytes.iter()) + .enumerate() + .fold(initial_digest, |acc, (i, fixed)| { + let mut values = vec![GFp::from_canonical_usize(i + 2)]; + values.extend_from_slice(fixed); + acc + map_to_curve_point(&values) + }), + ) + } else { + None + } + }) + .fold(index_digest, |acc, p| acc + p) +} diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index f7ae3e7a0..af78678c1 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -12,7 +12,10 @@ use anyhow::{Context, Result}; use envconfig::Envconfig; use log::info; use mp2_common::eth::ProofQuery; -use mp2_v1::api::{build_circuits_params, PublicParameters}; +use mp2_v1::{ + api::{build_circuits_params, PublicParameters}, + block_extraction::ExtractionType, +}; use std::{ fs::File, io::{BufReader, BufWriter}, @@ -90,14 +93,14 @@ pub async fn new_local_chain(storage: ProofKV) -> TestContext { } pub enum ParamsType { - Indexing, + Indexing(ExtractionType), Query, } impl ParamsType { pub fn full_path(&self, mut pre: PathBuf) -> PathBuf { match self { - ParamsType::Indexing => pre.push("index.params"), + ParamsType::Indexing(_) => pre.push("index.params"), ParamsType::Query => pre.push("query.params"), }; pre @@ -113,7 +116,7 @@ impl ParamsType { .context("while parsing MP2 parameters")?; ctx.query_params = Some(params); } - ParamsType::Indexing => { + ParamsType::Indexing(_) => { info!("parsing the indexing mp2-v1 parameters"); let params = bincode::deserialize_from(BufReader::new( File::open(&path).with_context(|| format!("while opening {path:?}"))?, @@ -145,9 +148,9 @@ impl ParamsType { ctx.query_params = Some(params); Ok(()) } - ParamsType::Indexing => { + ParamsType::Indexing(et) => { info!("building the mp2 indexing parameters"); - let mp2 = build_circuits_params(); + let mp2 = build_circuits_params(*et); ctx.params = Some(mp2); info!("writing the mp2-v1 indexing parameters"); Ok(()) @@ -170,7 +173,7 @@ impl ParamsType { )?; 
Ok(()) } - ParamsType::Indexing => { + ParamsType::Indexing(_) => { bincode::serialize_into( BufWriter::new( File::create(&path).with_context(|| format!("while creating {path:?}"))?, diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 8ce01bcb4..6e8ee3807 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -33,6 +33,7 @@ use common::{ }; use envconfig::Envconfig; use log::info; +use mp2_v1::block_extraction::ExtractionType; use parsil::{ assembler::DynamicCircuitPis, parse_and_validate, @@ -82,7 +83,8 @@ async fn integrated_indexing() -> Result<()> { let mut ctx = context::new_local_chain(storage).await; info!("Initial Anvil block: {}", ctx.block_number().await); info!("Building indexing params"); - ctx.build_params(ParamsType::Indexing).unwrap(); + ctx.build_params(ParamsType::Indexing(ExtractionType::Storage)) + .unwrap(); info!("Params built"); // NOTE: to comment to avoid very long tests... From fc59d4df79127f64f0df81be631ec161b2843ec4 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 2 Dec 2024 13:16:00 +0000 Subject: [PATCH 10/15] Added unit tests for receipt leaf api --- mp2-common/src/eth.rs | 112 +++++++++++------ mp2-test/src/mpt_sequential.rs | 73 +++++------ mp2-v1/src/block_extraction/circuit.rs | 161 ++++++++++++++++--------- mp2-v1/src/values_extraction/api.rs | 67 +++++++++- 4 files changed, 285 insertions(+), 128 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 9a36d30a8..b39a4939a 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -720,7 +720,6 @@ mod test { types::BlockNumber, }; use hashbrown::HashMap; - use tokio::task::JoinSet; use crate::{ mpt_sequential::utils::nibbles_to_bytes, @@ -860,9 +859,6 @@ mod test { #[tokio::test] async fn test_receipt_query() -> Result<()> { - let rpc = ProviderBuilder::new() - .on_anvil_with_config(|anvil| Anvil::fork(anvil, get_sepolia_url())); - // Make a contract that emits events so we can 
pick up on them sol! { #[allow(missing_docs)] @@ -889,44 +885,84 @@ mod test { } } } + + sol! { + #[allow(missing_docs)] + // solc v0.8.26; solc Counter.sol --via-ir --optimize --bin + #[sol(rpc, abi, bytecode="6080604052348015600e575f80fd5b506102288061001c5f395ff3fe608060405234801561000f575f80fd5b506004361061004a575f3560e01c8063488814e01461004e5780637229db15146100585780638381f58a14610062578063d09de08a14610080575b5f80fd5b61005661008a565b005b6100606100f8565b005b61006a610130565b6040516100779190610165565b60405180910390f35b610088610135565b005b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100c0610135565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a26100f6610135565b565b5f547fbe3cbcfa5d4a62a595b4a15f51de63c11797bbef2ff687873efb0bb2852ee20f60405160405180910390a261012e610135565b565b5f5481565b5f80815480929190610146906101ab565b9190505550565b5f819050919050565b61015f8161014d565b82525050565b5f6020820190506101785f830184610156565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f6101b58261014d565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036101e7576101e661017e565b5b60018201905091905056fea26469706673582212203b7602644bfff2df89c2fe9498cd533326876859a0df7b96ac10be1fdc09c3a064736f6c634300081a0033")] + + contract OtherEmitter { + uint256 public number; + event otherEvent(uint256 indexed num); + + function otherEmit() public { + emit otherEvent(number); + increment(); + } + + function twoEmits() public { + emit otherEvent(number); + increment(); + emit otherEvent(number); + increment(); + } + + function increment() public { + number++; + } + } + } + + // Spin up a local node. 
+ + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_config(|anvil| Anvil::arg(anvil, "--no-mining")); + + // Turn on auto mining to deploy the contracts + rpc.anvil_set_auto_mine(true).await.unwrap(); + // Deploy the contract using anvil - let contract = EventEmitter::deploy(rpc.clone()).await?; + let event_contract = EventEmitter::deploy(rpc.root()).await.unwrap(); - let tx_reqs = (0..10) - .map(|i| match i % 2 { - 0 => contract.testEmit().into_transaction_request(), - 1 => contract.twoEmits().into_transaction_request(), + // Deploy the contract using anvil + let other_contract = OtherEmitter::deploy(rpc.root()).await.unwrap(); + + // Disable auto mining so we can ensure that all the transaction appear in the same block + rpc.anvil_set_auto_mine(false).await.unwrap(); + + let mut pending_tx_builders = vec![]; + for i in 0..25 { + let tx_req = match i % 4 { + 0 => event_contract.testEmit().into_transaction_request(), + 1 => event_contract.twoEmits().into_transaction_request(), + 2 => other_contract.otherEmit().into_transaction_request(), + 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), - }) - .collect::>(); - let mut join_set = JoinSet::new(); - - tx_reqs.into_iter().for_each(|tx_req| { - let rpc_clone = rpc.clone(); - join_set.spawn(async move { - rpc_clone - .anvil_auto_impersonate_account(true) - .await - .unwrap(); - let sender_address = Address::random(); - let balance = U256::from(1e18 as u64); - rpc_clone - .anvil_set_balance(sender_address, balance) - .await - .unwrap(); - rpc_clone - .send_transaction(tx_req.with_from(sender_address)) - .await - .unwrap() - .watch() - .await - .unwrap() - }); - }); + }; + + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); + rpc.anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = tx_req.with_from(sender_address); + let tx_req_final = rpc + 
.fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + } + + rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); - let hashes = join_set.join_all().await; let mut transactions = Vec::new(); - for hash in hashes.into_iter() { + for pending in pending_tx_builders.into_iter() { + let hash = pending.watch().await.unwrap(); transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } @@ -936,7 +972,7 @@ mod test { let all_events = EventEmitter::abi::events(); let events = all_events.get("testEvent").unwrap(); - let receipt_query = ReceiptQuery::new(*contract.address(), events[0].clone()); + let receipt_query = ReceiptQuery::new(*event_contract.address(), events[0].clone()); let block = rpc .get_block( diff --git a/mp2-test/src/mpt_sequential.rs b/mp2-test/src/mpt_sequential.rs index 70080429a..6f5fa8719 100644 --- a/mp2-test/src/mpt_sequential.rs +++ b/mp2-test/src/mpt_sequential.rs @@ -11,7 +11,6 @@ use eth_trie::{EthTrie, MemoryDB, Trie}; use mp2_common::eth::{ReceiptProofInfo, ReceiptQuery}; use rand::{thread_rng, Rng}; use std::sync::Arc; -use tokio::task::JoinSet; /// Simply the maximum number of nibbles a key can have. const MAX_KEY_NIBBLE_LEN: usize = 64; @@ -112,50 +111,56 @@ pub fn generate_receipt_proofs() -> Vec { rt.block_on(async { // Spin up a local node. 
- let rpc = ProviderBuilder::new().on_anvil_with_config(|anvil| Anvil::block_time(anvil, 1)); + let rpc = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_config(|anvil| Anvil::arg(anvil, "--no-mining")); + + // Turn on auto mining to deploy the contracts + rpc.anvil_set_auto_mine(true).await.unwrap(); // Deploy the contract using anvil - let event_contract = EventEmitter::deploy(rpc.clone()).await.unwrap(); + let event_contract = EventEmitter::deploy(rpc.root()).await.unwrap(); // Deploy the contract using anvil - let other_contract = OtherEmitter::deploy(rpc.clone()).await.unwrap(); + let other_contract = OtherEmitter::deploy(rpc.root()).await.unwrap(); + + // Disable auto mining so we can ensure that all the transaction appear in the same block + rpc.anvil_set_auto_mine(false).await.unwrap(); - let tx_reqs = (0..25) - .map(|i| match i % 4 { + // Send a bunch of transactions, some of which are related to the event we are testing for. + let mut pending_tx_builders = vec![]; + for i in 0..25 { + let tx_req = match i % 4 { 0 => event_contract.testEmit().into_transaction_request(), 1 => event_contract.twoEmits().into_transaction_request(), 2 => other_contract.otherEmit().into_transaction_request(), 3 => other_contract.twoEmits().into_transaction_request(), _ => unreachable!(), - }) - .collect::>(); - let mut join_set = JoinSet::new(); - tx_reqs.into_iter().for_each(|tx_req| { - let rpc_clone = rpc.clone(); - join_set.spawn(async move { - let sender_address = Address::random(); - let funding = U256::from(1e18 as u64); - rpc_clone - .anvil_set_balance(sender_address, funding) - .await - .unwrap(); - rpc_clone - .anvil_auto_impersonate_account(true) - .await - .unwrap(); - rpc_clone - .send_transaction(tx_req.with_from(sender_address)) - .await - .unwrap() - .watch() - .await - .unwrap() - }); - }); - - let hashes = join_set.join_all().await; + }; + + let sender_address = Address::random(); + let funding = U256::from(1e18 as u64); + 
rpc.anvil_set_balance(sender_address, funding) + .await + .unwrap(); + rpc.anvil_auto_impersonate_account(true).await.unwrap(); + let new_req = tx_req.with_from(sender_address); + let tx_req_final = rpc + .fill(new_req) + .await + .unwrap() + .as_builder() + .unwrap() + .clone(); + pending_tx_builders.push(rpc.send_transaction(tx_req_final).await.unwrap()); + } + + // Mine a block, it should include all the transactions created above. + rpc.anvil_mine(Some(U256::from(1u8)), None).await.unwrap(); + let mut transactions = Vec::new(); - for hash in hashes.into_iter() { + for pending in pending_tx_builders.into_iter() { + let hash = pending.watch().await.unwrap(); transactions.push(rpc.get_transaction_by_hash(hash).await.unwrap().unwrap()); } diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index 4ba2c643d..f9d51c8f3 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -172,65 +172,116 @@ mod test { use super::{public_inputs::PublicInputs, BlockCircuit, BlockWires}; use anyhow::Result; - pub type SepoliaBlockCircuit = BlockCircuit; - #[tokio::test] async fn prove_and_verify_block_extraction_circuit() -> Result<()> { - let url = get_sepolia_url(); - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block_number = BlockNumberOrTag::Latest; - let block = provider - .get_block_by_number(block_number, true.into()) - .await - .unwrap() - .unwrap(); - - let rlp_headers = block.rlp(); - - let prev_block_hash = block - .header - .parent_hash - .0 - .pack(Endianness::Little) - .to_fields(); - let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); - let state_root = block - .header - .state_root - .0 - .pack(Endianness::Little) - .to_fields(); - let block_number_buff = block.header.number.to_be_bytes(); - const NUM_LIMBS: usize = u256::NUM_LIMBS; - let block_number = - left_pad_generic::(&block_number_buff.pack(Endianness::Big)) - .to_fields(); - 
- let setup = setup_circuit::<_, D, C, SepoliaBlockCircuit>(); - let circuit = SepoliaBlockCircuit::new(rlp_headers).unwrap(); - let proof = prove_circuit(&setup, &circuit); - let pi = PublicInputs::::from_slice(&proof.public_inputs); - - assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); - assert_eq!(pi.block_hash_raw(), &block_hash); - assert_eq!( - pi.block_hash_raw(), - block.header.hash.0.pack(Endianness::Little).to_fields() - ); - assert_eq!(pi.state_root_raw(), &state_root); - assert_eq!(pi.block_number_raw(), &block_number); - Ok(()) + prove_and_verify_storage_block_extraction_circuit().await?; + prove_and_verify_receipt_block_extraction_circuit().await } - impl UserCircuit for BlockCircuit { - type Wires = BlockWires; - - fn build(cb: &mut CBuilder) -> Self::Wires { - Self::build(cb, super::ExtractionType::Storage) - } - - fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.assign(pw, wires); + /// Macro used to produce testing functions for the various types of extraction we do. + macro_rules! 
impl_test_block_circuit { + ($(($fn_name:ident, $extraction:expr)), *) => { + $( + pub async fn $fn_name() -> Result<()> { + #[derive(Clone, Debug)] + pub struct TestCircuit { + inner: BlockCircuit, + } + + impl TestCircuit { + pub fn new(rlp_headers: Vec) -> Result { + crate::block_extraction::circuit::ensure!( + rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, + "block rlp headers too long" + ); + Ok(Self {inner: BlockCircuit { rlp_headers }}) + } + } + + impl UserCircuit for TestCircuit { + type Wires = BlockWires; + + fn build(cb: &mut CBuilder) -> Self::Wires { + BlockCircuit::build(cb, $extraction) + } + + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.inner.assign(pw, wires); + } + } + let url = get_sepolia_url(); + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + let block_number = BlockNumberOrTag::Latest; + let block = provider + .get_block_by_number(block_number, true.into()) + .await + .unwrap() + .unwrap(); + + let rlp_headers = block.rlp(); + + let prev_block_hash = block + .header + .parent_hash + .0 + .pack(Endianness::Little) + .to_fields(); + let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); + let root = match $extraction { + super::ExtractionType::Storage => {block + .header + .state_root + .0 + .pack(Endianness::Little) + .to_fields()}, + super::ExtractionType::Receipt => {block + .header + .receipts_root + .0 + .pack(Endianness::Little) + .to_fields()}, + super::ExtractionType::Transaction => {block + .header + .transactions_root + .0 + .pack(Endianness::Little) + .to_fields()}, + + }; + let block_number_buff = block.header.number.to_be_bytes(); + const NUM_LIMBS: usize = u256::NUM_LIMBS; + let block_number = + left_pad_generic::(&block_number_buff.pack(Endianness::Big)) + .to_fields(); + + let setup = setup_circuit::<_, D, C, TestCircuit>(); + let circuit = TestCircuit::new(rlp_headers).unwrap(); + let proof = prove_circuit(&setup, &circuit); + let pi = 
PublicInputs::::from_slice(&proof.public_inputs); + + assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); + assert_eq!(pi.block_hash_raw(), &block_hash); + assert_eq!( + pi.block_hash_raw(), + block.header.hash.0.pack(Endianness::Little).to_fields() + ); + + assert_eq!(pi.state_root_raw(), &root); + assert_eq!(pi.block_number_raw(), &block_number); + Ok(()) + } + )* } } + + impl_test_block_circuit!( + ( + prove_and_verify_storage_block_extraction_circuit, + super::ExtractionType::Storage + ), + ( + prove_and_verify_receipt_block_extraction_circuit, + super::ExtractionType::Receipt + ) + ); } diff --git a/mp2-v1/src/values_extraction/api.rs b/mp2-v1/src/values_extraction/api.rs index 2eedd5fe4..1242885a5 100644 --- a/mp2-v1/src/values_extraction/api.rs +++ b/mp2-v1/src/values_extraction/api.rs @@ -429,7 +429,10 @@ mod tests { mpt_sequential::utils::bytes_to_nibbles, types::{GFp, ADDRESS_LEN}, }; - use mp2_test::{mpt_sequential::generate_random_storage_mpt, utils::random_vector}; + use mp2_test::{ + mpt_sequential::{generate_random_storage_mpt, generate_receipt_proofs}, + utils::random_vector, + }; use plonky2::field::types::Field; use plonky2_ecgfp5::curve::curve::Point; use serial_test::serial; @@ -703,6 +706,68 @@ mod tests { ); } + #[test] + fn test_receipt_api() { + let receipt_proof_infos = generate_receipt_proofs(); + + // We check that we have enough receipts and then take the second and third info + // (the MPT proof for the first node is different). + // Then check that the node above both is a branch. 
+ assert!(receipt_proof_infos.len() > 3); + let second_info = &receipt_proof_infos[1]; + let third_info = &receipt_proof_infos[2]; + + let proof_length_1 = second_info.mpt_proof.len(); + let proof_length_2 = third_info.mpt_proof.len(); + + let list_one = rlp::decode_list::>(&second_info.mpt_proof[proof_length_1 - 2]); + let list_two = rlp::decode_list::>(&third_info.mpt_proof[proof_length_2 - 2]); + + assert!(list_one == list_two); + assert!(list_one.len() == 17); + + println!("Generating params..."); + let params = build_circuits_params(); + + println!("Proving leaf 1..."); + let leaf_input_1 = CircuitInput::new_receipt_leaf(second_info.clone()); + let now = std::time::Instant::now(); + let leaf_proof1 = generate_proof(¶ms, leaf_input_1).unwrap(); + { + let lp = ProofWithVK::deserialize(&leaf_proof1).unwrap(); + let pub1 = PublicInputs::new(&lp.proof.public_inputs); + let (_, ptr) = pub1.mpt_key_info(); + println!("pointer: {}", ptr); + } + println!( + "Proof for leaf 1 generated in {} ms", + now.elapsed().as_millis() + ); + + println!("Proving leaf 2..."); + let leaf_input_2 = CircuitInput::new_receipt_leaf(third_info.clone()); + let now = std::time::Instant::now(); + let leaf_proof2 = generate_proof(¶ms, leaf_input_2).unwrap(); + println!( + "Proof for leaf 2 generated in {} ms", + now.elapsed().as_millis() + ); + + // The branch case for receipts is identical to that of a mapping so we use the same api. 
+ println!("Proving branch..."); + let branch_input = CircuitInput::new_mapping_variable_branch( + second_info.mpt_proof[proof_length_1 - 2].clone(), + vec![leaf_proof1, leaf_proof2], + ); + + let now = std::time::Instant::now(); + generate_proof(¶ms, branch_input).unwrap(); + println!( + "Proof for branch node generated in {} ms", + now.elapsed().as_millis() + ); + } + fn test_circuits(is_simple_aggregation: bool, num_children: usize) { let contract_address = Address::from_str(TEST_CONTRACT_ADDRESS).unwrap(); let chain_id = 10; From f4d4a4bd8a5ba13a1f7c4b736940da586a7f7a97 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 2 Dec 2024 13:52:24 +0000 Subject: [PATCH 11/15] Rebased onto feat/receipt-trie --- mp2-common/src/eth.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index b39a4939a..168a63088 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -4,7 +4,7 @@ use alloy::{ consensus::{ReceiptEnvelope as CRE, ReceiptWithBloom, TxEnvelope}, eips::BlockNumberOrTag, json_abi::Event, - network::{eip2718::Encodable2718, BlockResponse, TransactionResponse}, + network::{eip2718::Encodable2718, BlockResponse}, primitives::{Address, Log, LogData, B256}, providers::{Provider, RootProvider}, rlp::{Decodable, Encodable as AlloyEncodable}, @@ -635,10 +635,7 @@ impl BlockUtil { _ => panic!("aie"), }; - let transaction_primitive = match TxEnvelope::try_from(transaction.clone()) { - Ok(t) => t, - _ => panic!("Couldn't get transaction envelope"), - }; + let transaction_primitive = TxEnvelope::from(transaction.clone()); let body_rlp = receipt_primitive.encoded_2718(); @@ -705,7 +702,7 @@ mod test { use std::str::FromStr; use alloy::{ - network::TransactionBuilder, + network::{TransactionBuilder, TransactionResponse}, node_bindings::Anvil, primitives::{Bytes, Log, U256}, providers::{ext::AnvilApi, Provider, ProviderBuilder}, From 455290f26984e77908ee09a913e8c1c1b826ed14 Mon Sep 17 00:00:00 
2001 From: Zack Youell Date: Mon, 2 Dec 2024 15:06:33 +0000 Subject: [PATCH 12/15] Reworked block extraction to extract all three roots --- mp2-v1/src/api.rs | 6 +- mp2-v1/src/block_extraction/circuit.rs | 203 +++++++++---------- mp2-v1/src/block_extraction/mod.rs | 14 +- mp2-v1/src/block_extraction/public_inputs.rs | 62 +++++- mp2-v1/src/final_extraction/base_circuit.rs | 4 + mp2-v1/tests/common/context.rs | 17 +- mp2-v1/tests/integrated_tests.rs | 5 +- 7 files changed, 176 insertions(+), 135 deletions(-) diff --git a/mp2-v1/src/api.rs b/mp2-v1/src/api.rs index 7bc957b75..129521229 100644 --- a/mp2-v1/src/api.rs +++ b/mp2-v1/src/api.rs @@ -79,9 +79,7 @@ impl PublicParameters { /// Instantiate the circuits employed for the pre-processing stage of LPN, /// returning their corresponding parameters -pub fn build_circuits_params( - extraction_type: block_extraction::ExtractionType, -) -> PublicParameters { +pub fn build_circuits_params() -> PublicParameters { log::info!("Building contract_extraction parameters..."); let contract_extraction = contract_extraction::build_circuits_params(); log::info!("Building length_extraction parameters..."); @@ -89,7 +87,7 @@ pub fn build_circuits_params( log::info!("Building values_extraction parameters..."); let values_extraction = values_extraction::build_circuits_params(); log::info!("Building block_extraction parameters..."); - let block_extraction = block_extraction::build_circuits_params(extraction_type); + let block_extraction = block_extraction::build_circuits_params(); log::info!("Building final_extraction parameters..."); let final_extraction = final_extraction::PublicParameters::build( block_extraction.circuit_data().verifier_data(), diff --git a/mp2-v1/src/block_extraction/circuit.rs b/mp2-v1/src/block_extraction/circuit.rs index f9d51c8f3..4c69fe25d 100644 --- a/mp2-v1/src/block_extraction/circuit.rs +++ b/mp2-v1/src/block_extraction/circuit.rs @@ -86,7 +86,7 @@ impl BlockCircuit { } /// Build the circuit, assigning the 
public inputs and returning the internal wires. - pub fn build(cb: &mut CBuilder, extraction_type: ExtractionType) -> BlockWires { + pub fn build(cb: &mut CBuilder) -> BlockWires { // already right padded to right size for keccak let rlp_headers = VectorWire::new(cb); @@ -102,10 +102,24 @@ impl BlockCircuit { // extract the state root of the block let state_root: Array = Array::::from_array(create_array(|i| { - rlp_headers.arr.arr[extraction_type.offset() + i] + rlp_headers.arr.arr[HEADER_STATE_ROOT_OFFSET + i] })); let state_root_packed = state_root.pack(cb, Endianness::Little); + // extract the transaction root of the block + let transaction_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[HEADER_TRANSACTION_ROOT_OFFSET + i] + })); + let transaction_root_packed = transaction_root.pack(cb, Endianness::Little); + + // extract the receipt root of the block + let receipt_root: Array = + Array::::from_array(create_array(|i| { + rlp_headers.arr.arr[HEADER_RECEIPT_ROOT_OFFSET + i] + })); + let receipt_root_packed = receipt_root.pack(cb, Endianness::Little); + // compute the block hash let bh_wires = KeccakCircuit::hash_vector(cb, &rlp_headers); @@ -125,6 +139,8 @@ impl BlockCircuit { &packed_prev_bh.downcast_to_targets().arr, &bn_u256.to_targets(), &state_root_packed.downcast_to_targets().arr, + &transaction_root_packed.downcast_to_targets().arr, + &receipt_root_packed.downcast_to_targets().arr, ) .register(cb); @@ -173,115 +189,88 @@ mod test { use anyhow::Result; #[tokio::test] - async fn prove_and_verify_block_extraction_circuit() -> Result<()> { - prove_and_verify_storage_block_extraction_circuit().await?; - prove_and_verify_receipt_block_extraction_circuit().await - } + pub async fn prove_and_verify_block_extraction_circuit() -> Result<()> { + #[derive(Clone, Debug)] + pub struct TestCircuit { + inner: BlockCircuit, + } - /// Macro used to produce testing functions for the various types of extraction we do. - macro_rules! 
impl_test_block_circuit { - ($(($fn_name:ident, $extraction:expr)), *) => { - $( - pub async fn $fn_name() -> Result<()> { - #[derive(Clone, Debug)] - pub struct TestCircuit { - inner: BlockCircuit, - } - - impl TestCircuit { - pub fn new(rlp_headers: Vec) -> Result { - crate::block_extraction::circuit::ensure!( - rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, - "block rlp headers too long" - ); - Ok(Self {inner: BlockCircuit { rlp_headers }}) - } - } - - impl UserCircuit for TestCircuit { - type Wires = BlockWires; - - fn build(cb: &mut CBuilder) -> Self::Wires { - BlockCircuit::build(cb, $extraction) - } - - fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { - self.inner.assign(pw, wires); - } - } - let url = get_sepolia_url(); - let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); - let block_number = BlockNumberOrTag::Latest; - let block = provider - .get_block_by_number(block_number, true.into()) - .await - .unwrap() - .unwrap(); - - let rlp_headers = block.rlp(); - - let prev_block_hash = block - .header - .parent_hash - .0 - .pack(Endianness::Little) - .to_fields(); - let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); - let root = match $extraction { - super::ExtractionType::Storage => {block - .header - .state_root - .0 - .pack(Endianness::Little) - .to_fields()}, - super::ExtractionType::Receipt => {block - .header - .receipts_root - .0 - .pack(Endianness::Little) - .to_fields()}, - super::ExtractionType::Transaction => {block - .header - .transactions_root - .0 - .pack(Endianness::Little) - .to_fields()}, - - }; - let block_number_buff = block.header.number.to_be_bytes(); - const NUM_LIMBS: usize = u256::NUM_LIMBS; - let block_number = - left_pad_generic::(&block_number_buff.pack(Endianness::Big)) - .to_fields(); - - let setup = setup_circuit::<_, D, C, TestCircuit>(); - let circuit = TestCircuit::new(rlp_headers).unwrap(); - let proof = prove_circuit(&setup, &circuit); - let pi = 
PublicInputs::::from_slice(&proof.public_inputs); - - assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); - assert_eq!(pi.block_hash_raw(), &block_hash); - assert_eq!( - pi.block_hash_raw(), - block.header.hash.0.pack(Endianness::Little).to_fields() + impl TestCircuit { + pub fn new(rlp_headers: Vec) -> Result { + crate::block_extraction::circuit::ensure!( + rlp_headers.len() <= crate::block_extraction::circuit::MAX_BLOCK_LEN, + "block rlp headers too long" ); + Ok(Self { + inner: BlockCircuit { rlp_headers }, + }) + } + } + + impl UserCircuit for TestCircuit { + type Wires = BlockWires; + + fn build(cb: &mut CBuilder) -> Self::Wires { + BlockCircuit::build(cb) + } - assert_eq!(pi.state_root_raw(), &root); - assert_eq!(pi.block_number_raw(), &block_number); - Ok(()) + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + self.inner.assign(pw, wires); } - )* } - } + let url = get_sepolia_url(); + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + let block_number = BlockNumberOrTag::Latest; + let block = provider + .get_block_by_number(block_number, true.into()) + .await + .unwrap() + .unwrap(); + + let rlp_headers = block.rlp(); + + let prev_block_hash = block + .header + .parent_hash + .0 + .pack(Endianness::Little) + .to_fields(); + let block_hash = block.block_hash().pack(Endianness::Little).to_fields(); + + let state_root = block.header.state_root.pack(Endianness::Little).to_fields(); + let transaction_root = block + .header + .transactions_root + .pack(Endianness::Little) + .to_fields(); + let receipt_root = block + .header + .receipts_root + .pack(Endianness::Little) + .to_fields(); + + let block_number_buff = block.header.number.to_be_bytes(); + const NUM_LIMBS: usize = u256::NUM_LIMBS; + let block_number = + left_pad_generic::(&block_number_buff.pack(Endianness::Big)) + .to_fields(); + + let setup = setup_circuit::<_, D, C, TestCircuit>(); + let circuit = TestCircuit::new(rlp_headers).unwrap(); + let proof = 
prove_circuit(&setup, &circuit); + let pi = PublicInputs::::from_slice(&proof.public_inputs); + + assert_eq!(pi.prev_block_hash_raw(), &prev_block_hash); + assert_eq!(pi.block_hash_raw(), &block_hash); + assert_eq!( + pi.block_hash_raw(), + block.header.hash.0.pack(Endianness::Little).to_fields() + ); - impl_test_block_circuit!( - ( - prove_and_verify_storage_block_extraction_circuit, - super::ExtractionType::Storage - ), - ( - prove_and_verify_receipt_block_extraction_circuit, - super::ExtractionType::Receipt - ) - ); + assert_eq!(pi.state_root_raw(), &state_root); + assert_eq!(pi.transaction_root_raw(), &transaction_root); + assert_eq!(pi.receipt_root_raw(), &receipt_root); + assert_eq!(pi.block_number_raw(), &block_number); + Ok(()) + } } diff --git a/mp2-v1/src/block_extraction/mod.rs b/mp2-v1/src/block_extraction/mod.rs index af268f2b9..76347b1fd 100644 --- a/mp2-v1/src/block_extraction/mod.rs +++ b/mp2-v1/src/block_extraction/mod.rs @@ -32,15 +32,15 @@ pub struct PublicParameters { } /// Returns the parameters necessary to prove block extraction circuits -pub fn build_circuits_params(extraction_type: ExtractionType) -> PublicParameters { - PublicParameters::build(extraction_type) +pub fn build_circuits_params() -> PublicParameters { + PublicParameters::build() } impl PublicParameters { - pub fn build(extraction_type: ExtractionType) -> Self { + pub fn build() -> Self { let config = default_config(); let mut cb = CircuitBuilder::new(config); - let wires = circuit::BlockCircuit::build(&mut cb, extraction_type); + let wires = circuit::BlockCircuit::build(&mut cb); let cd = cb.build(); Self { circuit_data: cd, @@ -77,13 +77,11 @@ mod test { }; use mp2_test::eth::get_sepolia_url; - use crate::block_extraction::{ - circuit::ExtractionType, public_inputs::PublicInputs, PublicParameters, - }; + use crate::block_extraction::{public_inputs::PublicInputs, PublicParameters}; #[tokio::test] async fn test_api_storage() -> Result<()> { - let params = 
PublicParameters::build(ExtractionType::Storage); + let params = PublicParameters::build(); let url = get_sepolia_url(); let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); let block_number = BlockNumberOrTag::Latest; diff --git a/mp2-v1/src/block_extraction/public_inputs.rs b/mp2-v1/src/block_extraction/public_inputs.rs index 143eeac93..e376baf9f 100644 --- a/mp2-v1/src/block_extraction/public_inputs.rs +++ b/mp2-v1/src/block_extraction/public_inputs.rs @@ -12,10 +12,14 @@ use plonky2::iop::target::Target; // - `PREV_BH : [8]F` packed Keccak hash of the block // - `BN : F` Proven block number // - `SH : [8]F` Packed state root hash +// - `TH : [8]F` Packed transaction root hash +// - `RH : [8]F` Packed receipt root hash const BH_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; const PREV_BH_RANGE: PublicInputRange = BH_RANGE.end..BH_RANGE.end + PACKED_HASH_LEN; const BN_RANGE: PublicInputRange = PREV_BH_RANGE.end..PREV_BH_RANGE.end + u256::NUM_LIMBS; const SH_RANGE: PublicInputRange = BN_RANGE.end..BN_RANGE.end + PACKED_HASH_LEN; +const TH_RANGE: PublicInputRange = SH_RANGE.end..SH_RANGE.end + PACKED_HASH_LEN; +const RH_RANGE: PublicInputRange = TH_RANGE.end..TH_RANGE.end + PACKED_HASH_LEN; /// Public inputs for the dynamic-length variable extraction. 
#[derive(Clone, Debug)] @@ -28,16 +32,29 @@ pub struct PublicInputs<'a, T> { pub(crate) bn: &'a [T], /// Packed state root pub(crate) sh: &'a [T], + /// Packed transaction root + pub(crate) th: &'a [T], + /// Packed receipt root + pub(crate) rh: &'a [T], } impl PublicInputCommon for PublicInputs<'_, Target> { - const RANGES: &'static [PublicInputRange] = &[BH_RANGE, PREV_BH_RANGE, BN_RANGE, SH_RANGE]; + const RANGES: &'static [PublicInputRange] = &[ + BH_RANGE, + PREV_BH_RANGE, + BN_RANGE, + SH_RANGE, + TH_RANGE, + RH_RANGE, + ]; fn register_args(&self, cb: &mut CBuilder) { cb.register_public_inputs(self.bh); cb.register_public_inputs(self.prev_bh); cb.register_public_inputs(self.bn); cb.register_public_inputs(self.sh); + cb.register_public_inputs(self.th); + cb.register_public_inputs(self.rh); } } @@ -48,16 +65,22 @@ impl<'a> PublicInputs<'a, Target> { prev_bh: &'a [Target], bn: &'a [Target], sh: &'a [Target], + th: &'a [Target], + rh: &'a [Target], ) -> Self { assert!(bh.len() == PACKED_HASH_LEN); assert!(prev_bh.len() == PACKED_HASH_LEN); assert!(sh.len() == PACKED_HASH_LEN); + assert!(th.len() == PACKED_HASH_LEN); + assert!(rh.len() == PACKED_HASH_LEN); assert!(bn.len() == u256::NUM_LIMBS); Self { bh, prev_bh, bn, sh, + th, + rh, } } @@ -72,6 +95,14 @@ impl<'a> PublicInputs<'a, Target> { pub fn state_root(&self) -> OutputHash { OutputHash::from_targets(self.sh) } + + pub fn transaction_root(&self) -> OutputHash { + OutputHash::from_targets(self.th) + } + + pub fn receipt_root(&self) -> OutputHash { + OutputHash::from_targets(self.rh) + } } impl PublicInputs<'_, T> { @@ -82,6 +113,8 @@ impl PublicInputs<'_, T> { .chain(self.prev_bh.iter()) .chain(self.bn.iter()) .chain(self.sh.iter()) + .chain(self.th.iter()) + .chain(self.rh.iter()) .cloned() .collect() } @@ -89,19 +122,30 @@ impl PublicInputs<'_, T> { impl<'a, T> PublicInputs<'a, T> { /// Total length of the public inputs. 
- pub const TOTAL_LEN: usize = SH_RANGE.end; + pub const TOTAL_LEN: usize = RH_RANGE.end; /// Creates a new instance from its internal parts. - pub fn from_parts(bh: &'a [T], prev_bh: &'a [T], bn: &'a [T], sh: &'a [T]) -> Self { + pub fn from_parts( + bh: &'a [T], + prev_bh: &'a [T], + bn: &'a [T], + sh: &'a [T], + th: &'a [T], + rh: &'a [T], + ) -> Self { assert_eq!(bh.len(), BH_RANGE.len()); assert_eq!(prev_bh.len(), PREV_BH_RANGE.len()); assert_eq!(sh.len(), SH_RANGE.len()); + assert_eq!(th.len(), TH_RANGE.len()); + assert_eq!(rh.len(), RH_RANGE.len()); Self { bh, prev_bh, bn, sh, + th, + rh, } } @@ -112,6 +156,8 @@ impl<'a, T> PublicInputs<'a, T> { prev_bh: &pi[PREV_BH_RANGE], bn: &pi[BN_RANGE], sh: &pi[SH_RANGE], + th: &pi[TH_RANGE], + rh: &pi[RH_RANGE], } } @@ -134,4 +180,14 @@ impl<'a, T> PublicInputs<'a, T> { pub const fn state_root_raw(&self) -> &[T] { self.sh } + + /// Returns the packed transaction root hash. + pub const fn transaction_root_raw(&self) -> &[T] { + self.th + } + + /// Returns the packed receipt root hash. 
+ pub const fn receipt_root_raw(&self) -> &[T] { + self.rh + } } diff --git a/mp2-v1/src/final_extraction/base_circuit.rs b/mp2-v1/src/final_extraction/base_circuit.rs index a2b164a86..480a68080 100644 --- a/mp2-v1/src/final_extraction/base_circuit.rs +++ b/mp2-v1/src/final_extraction/base_circuit.rs @@ -441,6 +441,8 @@ pub(crate) mod test { ); let h = &random_vector::(PACKED_HASH_LEN).to_fields(); + let th = &random_vector::(PACKED_HASH_LEN).to_fields(); + let rh = &random_vector::(PACKED_HASH_LEN).to_fields(); let contract_dm = Point::rand(); let key = &random_vector::(MAX_KEY_NIBBLE_LEN).to_fields(); let ptr = &F::NEG_ONE; // simulating end of MPT recursion @@ -467,6 +469,8 @@ pub(crate) mod test { prev_bh: &parent_block_hash, bn: &block_number, sh: h, + th, + rh, } .to_vec(); ProofsPi { diff --git a/mp2-v1/tests/common/context.rs b/mp2-v1/tests/common/context.rs index af78678c1..f7ae3e7a0 100644 --- a/mp2-v1/tests/common/context.rs +++ b/mp2-v1/tests/common/context.rs @@ -12,10 +12,7 @@ use anyhow::{Context, Result}; use envconfig::Envconfig; use log::info; use mp2_common::eth::ProofQuery; -use mp2_v1::{ - api::{build_circuits_params, PublicParameters}, - block_extraction::ExtractionType, -}; +use mp2_v1::api::{build_circuits_params, PublicParameters}; use std::{ fs::File, io::{BufReader, BufWriter}, @@ -93,14 +90,14 @@ pub async fn new_local_chain(storage: ProofKV) -> TestContext { } pub enum ParamsType { - Indexing(ExtractionType), + Indexing, Query, } impl ParamsType { pub fn full_path(&self, mut pre: PathBuf) -> PathBuf { match self { - ParamsType::Indexing(_) => pre.push("index.params"), + ParamsType::Indexing => pre.push("index.params"), ParamsType::Query => pre.push("query.params"), }; pre @@ -116,7 +113,7 @@ impl ParamsType { .context("while parsing MP2 parameters")?; ctx.query_params = Some(params); } - ParamsType::Indexing(_) => { + ParamsType::Indexing => { info!("parsing the indexing mp2-v1 parameters"); let params = 
bincode::deserialize_from(BufReader::new( File::open(&path).with_context(|| format!("while opening {path:?}"))?, @@ -148,9 +145,9 @@ impl ParamsType { ctx.query_params = Some(params); Ok(()) } - ParamsType::Indexing(et) => { + ParamsType::Indexing => { info!("building the mp2 indexing parameters"); - let mp2 = build_circuits_params(*et); + let mp2 = build_circuits_params(); ctx.params = Some(mp2); info!("writing the mp2-v1 indexing parameters"); Ok(()) @@ -173,7 +170,7 @@ impl ParamsType { )?; Ok(()) } - ParamsType::Indexing(_) => { + ParamsType::Indexing => { bincode::serialize_into( BufWriter::new( File::create(&path).with_context(|| format!("while creating {path:?}"))?, diff --git a/mp2-v1/tests/integrated_tests.rs b/mp2-v1/tests/integrated_tests.rs index 6e8ee3807..54694a385 100644 --- a/mp2-v1/tests/integrated_tests.rs +++ b/mp2-v1/tests/integrated_tests.rs @@ -33,7 +33,7 @@ use common::{ }; use envconfig::Envconfig; use log::info; -use mp2_v1::block_extraction::ExtractionType; + use parsil::{ assembler::DynamicCircuitPis, parse_and_validate, @@ -83,8 +83,7 @@ async fn integrated_indexing() -> Result<()> { let mut ctx = context::new_local_chain(storage).await; info!("Initial Anvil block: {}", ctx.block_number().await); info!("Building indexing params"); - ctx.build_params(ParamsType::Indexing(ExtractionType::Storage)) - .unwrap(); + ctx.build_params(ParamsType::Indexing).unwrap(); info!("Params built"); // NOTE: to comment to avoid very long tests... 
From f07a1644b7ed5d0fd898abe0bd03af6a9eb41e7d Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Tue, 3 Dec 2024 14:06:00 +0000 Subject: [PATCH 13/15] Added testing for final extraction API --- mp2-v1/src/final_extraction/api.rs | 35 +++ .../src/final_extraction/receipt_circuit.rs | 210 +++++++++++++++++- mp2-v1/src/values_extraction/public_inputs.rs | 2 +- 3 files changed, 243 insertions(+), 4 deletions(-) diff --git a/mp2-v1/src/final_extraction/api.rs b/mp2-v1/src/final_extraction/api.rs index 5cb557bb7..5e7f96498 100644 --- a/mp2-v1/src/final_extraction/api.rs +++ b/mp2-v1/src/final_extraction/api.rs @@ -273,6 +273,7 @@ mod tests { final_extraction::{ base_circuit::{test::ProofsPi, CONTRACT_SET_NUM_IO, VALUE_SET_NUM_IO}, lengthed_circuit::LENGTH_SET_NUM_IO, + receipt_circuit::test::ReceiptsProofsPi, }, length_extraction, }; @@ -296,6 +297,7 @@ mod tests { ); let proof_pis = ProofsPi::random(); + let receipt_proof_pis = ReceiptsProofsPi::generate_from_proof_pi_value(&proof_pis); let length_pis = proof_pis.length_inputs(); let len_dm = length_extraction::PublicInputs::::from_slice(&length_pis).metadata_point(); let block_proof = block_circuit @@ -310,6 +312,13 @@ mod tests { let length_proof = &length_params .generate_input_proofs::<1>([length_pis.try_into().unwrap()]) .unwrap()[0]; + let receipt_proof = &values_params + .generate_input_proofs::<1>([receipt_proof_pis + .value_inputs() + .proof_inputs + .try_into() + .unwrap()]) + .unwrap()[0]; let contract_proof: ProofWithVK = ( contract_proof.clone(), @@ -374,5 +383,31 @@ mod tests { ) .unwrap(); proof_pis.check_proof_public_inputs(proof.proof(), TableDimension::Compound, Some(len_dm)); + + let receipt_proof: ProofWithVK = ( + receipt_proof.clone(), + values_params.verifier_data_for_input_proofs::<1>()[0].clone(), + ) + .into(); + + let circuit_input = CircuitInput::new_receipt_input( + serialize_proof(&block_proof).unwrap(), + receipt_proof.serialize().unwrap(), + ) + .unwrap(); + let proof = 
ProofWithVK::deserialize( + ¶ms + .generate_receipt_proof( + match circuit_input { + CircuitInput::Receipt(input) => input, + _ => unreachable!(), + }, + values_params.get_recursive_circuit_set(), + ) + .unwrap(), + ) + .unwrap(); + + receipt_proof_pis.check_proof_public_inputs(proof.proof()); } } diff --git a/mp2-v1/src/final_extraction/receipt_circuit.rs b/mp2-v1/src/final_extraction/receipt_circuit.rs index ef536ef83..bce6854eb 100644 --- a/mp2-v1/src/final_extraction/receipt_circuit.rs +++ b/mp2-v1/src/final_extraction/receipt_circuit.rs @@ -2,9 +2,10 @@ use mp2_common::{ default_config, keccak::{OutputHash, PACKED_HASH_LEN}, proof::{deserialize_proof, verify_proof_fixed_circuit, ProofWithVK}, + public_inputs::PublicInputCommon, serialization::{deserialize, serialize}, u256::UInt256Target, - utils::FromTargets, + utils::{FromTargets, ToTargets}, C, D, F, }; use plonky2::{ @@ -29,7 +30,10 @@ use serde::{Deserialize, Serialize}; use crate::{block_extraction, values_extraction}; -use super::api::{FinalExtractionBuilderParams, NUM_IO}; +use super::{ + api::{FinalExtractionBuilderParams, NUM_IO}, + PublicInputs, +}; use anyhow::Result; @@ -67,8 +71,21 @@ impl ReceiptExtractionCircuit { // enforce block_pi.state_root == contract_pi.state_root block_pi - .state_root() + .receipt_root() .enforce_equal(b, &OutputHash::from_targets(value_pi.root_hash_info())); + + PublicInputs::new( + block_pi.bh, + block_pi.prev_bh, + // here the value digest is the same since for length proof, it is assumed the table + // digest is in Compound format (i.e. multiple rows inside digest already). 
+ &value_pi.values_digest_target().to_targets(), + &value_pi.metadata_digest_target().to_targets(), + &block_pi.bn.to_targets(), + &[b._false().target], + ) + .register_args(b); + ReceiptExtractionWires { dm: value_pi.metadata_digest_target(), dv: value_pi.values_digest_target(), @@ -211,3 +228,190 @@ impl ReceiptCircuitProofWires { .get_public_input_targets::() } } + +#[cfg(test)] +pub(crate) mod test { + use std::iter::once; + + use crate::final_extraction::{base_circuit::test::ProofsPi, PublicInputs}; + + use super::*; + use alloy::primitives::U256; + use anyhow::Result; + use itertools::Itertools; + use mp2_common::{ + keccak::PACKED_HASH_LEN, + utils::{Endianness, Packer, ToFields}, + }; + use mp2_test::{ + circuit::{run_circuit, UserCircuit}, + utils::random_vector, + }; + use plonky2::{ + field::types::{PrimeField64, Sample}, + hash::hash_types::HashOut, + iop::witness::WitnessWrite, + plonk::config::GenericHashOut, + }; + use plonky2_ecgfp5::curve::curve::Point; + use values_extraction::public_inputs::tests::new_extraction_public_inputs; + + #[derive(Clone, Debug)] + struct TestReceiptCircuit { + pis: ReceiptsProofsPi, + } + + struct TestReceiptWires { + pis: ReceiptsProofsPiTarget, + } + + impl UserCircuit for TestReceiptCircuit { + type Wires = TestReceiptWires; + fn build(c: &mut CircuitBuilder) -> Self::Wires { + let proofs_pi = ReceiptsProofsPiTarget::new(c); + let _ = ReceiptExtractionCircuit::build(c, &proofs_pi.blocks_pi, &proofs_pi.values_pi); + TestReceiptWires { pis: proofs_pi } + } + fn prove(&self, pw: &mut PartialWitness, wires: &Self::Wires) { + wires.pis.assign(pw, &self.pis); + } + } + + #[derive(Clone, Debug)] + pub(crate) struct ReceiptsProofsPiTarget { + pub(crate) blocks_pi: Vec, + pub(crate) values_pi: Vec, + } + + impl ReceiptsProofsPiTarget { + pub(crate) fn new(b: &mut CircuitBuilder) -> Self { + Self { + blocks_pi: b.add_virtual_targets( + block_extraction::public_inputs::PublicInputs::::TOTAL_LEN, + ), + values_pi: b + 
.add_virtual_targets(values_extraction::PublicInputs::::TOTAL_LEN), + } + } + pub(crate) fn assign(&self, pw: &mut PartialWitness, pis: &ReceiptsProofsPi) { + pw.set_target_arr(&self.values_pi, pis.values_pi.as_ref()); + pw.set_target_arr(&self.blocks_pi, pis.blocks_pi.as_ref()); + } + } + + /// TODO: refactor this struct to mimic exactly the base circuit wires in that it can contain + /// multiple values + #[derive(Clone, Debug)] + pub(crate) struct ReceiptsProofsPi { + pub(crate) blocks_pi: Vec, + pub(crate) values_pi: Vec, + } + + impl ReceiptsProofsPi { + /// Function takes in a [`ProofsPi`] instance and generates a set of values public inputs + /// that agree with the provided receipts root from the `blocks_pi`. + pub(crate) fn generate_from_proof_pi_value(base_info: &ProofsPi) -> ReceiptsProofsPi { + let original = base_info.value_inputs(); + let block_pi = base_info.block_inputs(); + let (k, t) = original.mpt_key_info(); + let new_value_digest = Point::rand(); + let new_metadata_digest = Point::rand(); + let new_values_pi = block_pi + .receipt_root_raw() + .iter() + .chain(k.iter()) + .chain(once(&t)) + .chain(new_value_digest.to_weierstrass().to_fields().iter()) + .chain(new_metadata_digest.to_weierstrass().to_fields().iter()) + .chain(once(&original.n())) + .cloned() + .collect_vec(); + Self { + blocks_pi: base_info.blocks_pi.clone(), + values_pi: new_values_pi, + } + } + + pub(crate) fn block_inputs(&self) -> block_extraction::PublicInputs { + block_extraction::PublicInputs::from_slice(&self.blocks_pi) + } + + pub(crate) fn value_inputs(&self) -> values_extraction::PublicInputs { + values_extraction::PublicInputs::new(&self.values_pi) + } + + /// check public inputs of the proof match with the ones in `self`. 
+ /// `compound_type` is a flag to specify whether `proof` is generated for a simple or compound type + /// `length_dm` is the metadata digest of a length proof, which is provided only for proofs related + /// to a compound type with a length slot + pub(crate) fn check_proof_public_inputs(&self, proof: &ProofWithPublicInputs) { + let proof_pis = PublicInputs::from_slice(&proof.public_inputs); + let block_pi = self.block_inputs(); + + assert_eq!(proof_pis.bn, block_pi.bn); + assert_eq!(proof_pis.h, block_pi.bh); + assert_eq!(proof_pis.ph, block_pi.prev_bh); + + // check digests + let value_pi = self.value_inputs(); + + assert_eq!(proof_pis.value_point(), value_pi.values_digest()); + + assert_eq!(proof_pis.metadata_point(), value_pi.metadata_digest()); + } + + pub(crate) fn random() -> Self { + let value_h = HashOut::::rand().to_bytes().pack(Endianness::Little); + let key = random_vector(64); + let ptr = usize::MAX; + let value_dv = Point::rand(); + let value_dm = Point::rand(); + let n = 10; + let values_pi = new_extraction_public_inputs( + &value_h, + &key, + ptr, + &value_dv.to_weierstrass(), + &value_dm.to_weierstrass(), + n, + ); + + let th = &random_vector::(PACKED_HASH_LEN).to_fields(); + let sh = &random_vector::(PACKED_HASH_LEN).to_fields(); + + // The receipts root and value root need to agree + let rh = &value_h.to_fields(); + + let block_number = U256::from(F::rand().to_canonical_u64()).to_fields(); + let block_hash = HashOut::::rand() + .to_bytes() + .pack(Endianness::Little) + .to_fields(); + let parent_block_hash = HashOut::::rand() + .to_bytes() + .pack(Endianness::Little) + .to_fields(); + let blocks_pi = block_extraction::public_inputs::PublicInputs { + bh: &block_hash, + prev_bh: &parent_block_hash, + bn: &block_number, + sh, + th, + rh, + } + .to_vec(); + ReceiptsProofsPi { + blocks_pi, + values_pi, + } + } + } + + #[test] + fn final_simple_value() -> Result<()> { + let pis = ReceiptsProofsPi::random(); + let test_circuit = TestReceiptCircuit { 
pis }; + run_circuit::(test_circuit); + Ok(()) + } +} diff --git a/mp2-v1/src/values_extraction/public_inputs.rs b/mp2-v1/src/values_extraction/public_inputs.rs index ee7e39118..806d897b7 100644 --- a/mp2-v1/src/values_extraction/public_inputs.rs +++ b/mp2-v1/src/values_extraction/public_inputs.rs @@ -17,7 +17,7 @@ use plonky2_ecgfp5::{ curve::curve::WeierstrassPoint, gadgets::curve::{CircuitBuilderEcGFp5, CurveTarget}, }; -use std::array; +use std::{array, fmt::Debug}; // Leaf/Extension/Branch node Public Inputs: // - `H : [8]F` packed Keccak hash of the extension node From da688dfda01cc6de42e7d38867f7a0a05987d186 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 9 Dec 2024 11:31:42 +0000 Subject: [PATCH 14/15] value extraction with UpdateTree unit test --- mp2-common/src/eth.rs | 2 +- mp2-v1/src/indexing/mod.rs | 1 + mp2-v1/src/indexing/planner/mod.rs | 4 + .../src/indexing/planner/value_extraction.rs | 181 ++++++++++++++++++ ryhope/src/storage/updatetree.rs | 17 +- 5 files changed, 203 insertions(+), 2 deletions(-) create mode 100644 mp2-v1/src/indexing/planner/mod.rs create mode 100644 mp2-v1/src/indexing/planner/value_extraction.rs diff --git a/mp2-common/src/eth.rs b/mp2-common/src/eth.rs index 168a63088..88661bc7a 100644 --- a/mp2-common/src/eth.rs +++ b/mp2-common/src/eth.rs @@ -964,7 +964,7 @@ mod test { } let block_number = transactions.first().unwrap().block_number.unwrap(); - println!("block number: {block_number}"); + // We want to get the event signature so we can make a ReceiptQuery let all_events = EventEmitter::abi::events(); diff --git a/mp2-v1/src/indexing/mod.rs b/mp2-v1/src/indexing/mod.rs index 90de676e0..73be2843d 100644 --- a/mp2-v1/src/indexing/mod.rs +++ b/mp2-v1/src/indexing/mod.rs @@ -5,6 +5,7 @@ use mp2_common::types::HashOutput; pub mod block; pub mod cell; pub mod index; +pub mod planner; pub mod row; pub type ColumnID = u64; diff --git a/mp2-v1/src/indexing/planner/mod.rs b/mp2-v1/src/indexing/planner/mod.rs new file mode 100644 
index 000000000..44cb47b81 --- /dev/null +++ b/mp2-v1/src/indexing/planner/mod.rs @@ -0,0 +1,4 @@ +//! This module contains code for building an [`UpdateTree`] for indexing, for now it is only for the Receipt case. +//! TODO: Add support for storage indexing. + +pub mod value_extraction; diff --git a/mp2-v1/src/indexing/planner/value_extraction.rs b/mp2-v1/src/indexing/planner/value_extraction.rs new file mode 100644 index 000000000..a281de350 --- /dev/null +++ b/mp2-v1/src/indexing/planner/value_extraction.rs @@ -0,0 +1,181 @@ +//! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. + +use std::iter; + +use alloy::{ + consensus::Header, + primitives::{keccak256, B256}, +}; +use anyhow::{anyhow, Result}; +use mp2_common::eth::Rlpable; +use ryhope::storage::updatetree::UpdateTree; + +/// Given a list MPT proofs, a block [`Header`] and an epoch, this function produces an [`UpdateTree`] +/// for proving correct value extraction. The leaves of the tree represent proofs that have no dependencies. +pub fn produce_update_tree( + paths: &[Vec>], + block_header: &Header, + epoch: i64, +) -> Result> { + // First check that paths is not empty, even if there are no relevant paths we still prove emptiness of the Block. + if paths.is_empty() { + return Err(anyhow!("No paths were provided, there is nothing to prove")); + } + + // Now we make a node for the block proof + let block_hash: [u8; 32] = block_header + .block_hash() + .try_into() + .map_err(|_| anyhow!("Could not convert block hash to fixed length array"))?; + let block_proof_key = B256::from_slice(&block_hash); + + // All of the paths should be from root to leaf, so we append the hash of the trie root and the block as the first element, this corresponds to the final extraction proof + // we also make a two element path consisting of this final key and the block hash. 
+ let final_key = keccak256([keccak256(&paths[0][0]).0, block_hash].concat()); + + // Convert the paths into their keys using keccak + let key_paths = paths + .iter() + .map(|path| { + iter::once(final_key) + .chain(path.iter().map(keccak256)) + .collect::>() + }) + .chain(vec![vec![final_key, block_proof_key]]) + .collect::>>(); + + // Now we make the UpdateTree + Ok(UpdateTree::::from_paths(key_paths, epoch)) +} + +#[cfg(test)] +pub mod tests { + + use std::str::FromStr; + + use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::ProviderBuilder, sol}; + use mp2_common::eth::{BlockUtil, ReceiptProofInfo, ReceiptQuery}; + use mp2_test::eth::get_mainnet_url; + + use super::*; + + #[tokio::test] + async fn test_receipt_update_tree() -> Result<()> { + // First get the info we will feed in to our function + let receipt_proofs = test_receipt_trie_helper().await?; + + let block_info = build_test_data().await; + + let paths = receipt_proofs + .iter() + .map(|info| info.mpt_proof.clone()) + .collect::>>>(); + let header = block_info.block.header.inner.clone(); + let epoch: i64 = 21362445; + + let update_tree = produce_update_tree(&paths, &header, epoch)?; + + // The root of the update tree should be the hash of the block hash and the root of the receipt trie. + let block_hash: [u8; 32] = header + .block_hash() + .try_into() + .map_err(|_| anyhow!("Could not convert block hash to fixed length array"))?; + let block_proof_key = B256::from_slice(&block_hash); + + // All of the paths should be from root to leaf, so we append the hash of the trie root and the block as the first element, this corresponds to the final extraction proof + // we also make a two element path consisting of this final key and the block hash. + let final_key = keccak256([header.receipts_root.0, block_hash].concat()); + + assert_eq!(*update_tree.root(), final_key); + + // We check that the immediate children of the root are correct. 
+ update_tree + .node(0) + .children() + .iter() + .for_each(|&child_index| { + let node = update_tree.node(child_index); + + let is_block_hash = *node.key() == block_proof_key; + let is_receipt_hash = *node.key() == header.receipts_root; + + // perform an or on the above two + assert!(is_block_hash || is_receipt_hash); + }); + + // We iterate up the paths from leaf to root to check that each node has the correct parent. + // First we return an iterator over all the nodes + let mut all_nodes = update_tree.descendants(0); + paths.iter().try_for_each(|path| { + // Find the initial state we need for the scan + let leaf_key = keccak256(path.last().ok_or(anyhow!("Path was empty!"))?); + let leaf_node_index = all_nodes + .find(|index| *update_tree.node(*index).key() == leaf_key) + .ok_or(anyhow!("Leaf key did not exist in tree"))?; + // Since in the `produce_update_tree` function we append a node to each of these paths at the start if all the nodes are included the following should return `0usize`. + let final_parent = path + .iter() + .rev() + .try_fold(leaf_node_index, |state, node| { + let tree_node = update_tree.node(state); + if *tree_node.key() == keccak256(node) { + tree_node.parent() + } else { + None + } + }) + .ok_or(anyhow!("final parent was a None value"))?; + + assert_eq!(final_parent, 0usize); + Ok(()) + }) + } + + /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes. + async fn build_test_data() -> BlockUtil { + let url = get_mainnet_url(); + // get some tx and receipt + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + // We fetch a specific block which we know includes transactions relating to the PudgyPenguins contract. + BlockUtil::fetch(&provider, BlockNumberOrTag::Number(21362445)) + .await + .unwrap() + } + + /// Function to build a list of [`ReceiptProofInfo`] for a set block. 
+ async fn test_receipt_trie_helper() -> Result> { + // First we choose the contract and event we are going to monitor. + // We use the mainnet PudgyPenguins contract at address 0xbd3531da5cf5857e7cfaa92426877b022e612cf8 + // and monitor for the `Approval` event. + let address = Address::from_str("0xbd3531da5cf5857e7cfaa92426877b022e612cf8")?; + + // We have to create what the event abi looks like + sol! { + #[allow(missing_docs)] + #[sol(rpc, abi)] + contract EventTest { + #[derive(Debug)] + event ApprovalForAll(address indexed owner, address indexed operator, bool approved); + + } + }; + + let approval_event = EventTest::abi::events() + .get("ApprovalForAll") + .ok_or(anyhow!("No ApprovalForAll event exists"))?[0] + .clone(); + + let query = ReceiptQuery::new(address, approval_event); + + // Spin up a RootProvider + let url = get_mainnet_url(); + + let provider = ProviderBuilder::new().on_http(url.parse().unwrap()); + + // fetch the list of proofs + query + .query_receipt_proofs(&provider, BlockNumberOrTag::Number(21362445)) + .await + } +} diff --git a/ryhope/src/storage/updatetree.rs b/ryhope/src/storage/updatetree.rs index 32d34cc3e..64b612a01 100644 --- a/ryhope/src/storage/updatetree.rs +++ b/ryhope/src/storage/updatetree.rs @@ -40,6 +40,21 @@ impl UpdateTreeNode { fn is_leaf(&self) -> bool { self.children.is_empty() } + + /// Getter for the key + pub fn key(&self) -> &K { + &self.k + } + + /// Getter for the children of this node + pub fn children(&self) -> &BTreeSet { + &self.children + } + + /// Getter for the parent if it exists + pub fn parent(&self) -> Option { + self.parent + } } impl UpdateTree { @@ -47,7 +62,7 @@ impl UpdateTree { &self.nodes[0].k } - fn node(&self, i: usize) -> &UpdateTreeNode { + pub fn node(&self, i: usize) -> &UpdateTreeNode { &self.nodes[i] } From 31f8fbd6a195ac59baab653e1d98fc59e9b02fc8 Mon Sep 17 00:00:00 2001 From: Zack Youell Date: Mon, 9 Dec 2024 14:12:20 +0000 Subject: [PATCH 15/15] Slightly genericises input --- 
mp2-v1/src/indexing/planner/mod.rs | 1 + mp2-v1/src/indexing/planner/ports/input.rs | 7 ++ .../src/indexing/planner/ports/input_impl.rs | 11 +++ mp2-v1/src/indexing/planner/ports/mod.rs | 4 + .../src/indexing/planner/value_extraction.rs | 73 +++++++++---------- 5 files changed, 59 insertions(+), 37 deletions(-) create mode 100644 mp2-v1/src/indexing/planner/ports/input.rs create mode 100644 mp2-v1/src/indexing/planner/ports/input_impl.rs create mode 100644 mp2-v1/src/indexing/planner/ports/mod.rs diff --git a/mp2-v1/src/indexing/planner/mod.rs b/mp2-v1/src/indexing/planner/mod.rs index 44cb47b81..e6d0213aa 100644 --- a/mp2-v1/src/indexing/planner/mod.rs +++ b/mp2-v1/src/indexing/planner/mod.rs @@ -1,4 +1,5 @@ //! This module contains code for building an [`UpdateTree`] for indexing, for now it is only for the Receipt case. //! TODO: Add support for storage indexing. +pub mod ports; pub mod value_extraction; diff --git a/mp2-v1/src/indexing/planner/ports/input.rs b/mp2-v1/src/indexing/planner/ports/input.rs new file mode 100644 index 000000000..9921992d0 --- /dev/null +++ b/mp2-v1/src/indexing/planner/ports/input.rs @@ -0,0 +1,7 @@ +//! This module defines ports that should be implemented to feed data into the update planner. + +/// This trait is implemented by anything that we can generate a value extraction proof for. +pub trait Extractable { + /// This method returns the MPT inclusion proof that the data must have to be extractable. + fn to_path(&self) -> Vec>; +} diff --git a/mp2-v1/src/indexing/planner/ports/input_impl.rs b/mp2-v1/src/indexing/planner/ports/input_impl.rs new file mode 100644 index 000000000..da595acf0 --- /dev/null +++ b/mp2-v1/src/indexing/planner/ports/input_impl.rs @@ -0,0 +1,11 @@ +//! Implementations of the traits found in [`crate::indexing::planner::ports::input`]. 
+ +use mp2_common::eth::ReceiptProofInfo; + +use super::input::Extractable; + +impl Extractable for ReceiptProofInfo { + fn to_path(&self) -> Vec> { + self.mpt_proof.clone() + } +} diff --git a/mp2-v1/src/indexing/planner/ports/mod.rs b/mp2-v1/src/indexing/planner/ports/mod.rs new file mode 100644 index 000000000..399d53264 --- /dev/null +++ b/mp2-v1/src/indexing/planner/ports/mod.rs @@ -0,0 +1,4 @@ +//! Module defining the interfaces that need to be implemented to feed data into update planner and what it pushes out. + +pub mod input; +pub mod input_impl; diff --git a/mp2-v1/src/indexing/planner/value_extraction.rs b/mp2-v1/src/indexing/planner/value_extraction.rs index a281de350..36a728879 100644 --- a/mp2-v1/src/indexing/planner/value_extraction.rs +++ b/mp2-v1/src/indexing/planner/value_extraction.rs @@ -1,7 +1,6 @@ //! This code returns an [`UpdateTree`] used to plan how we prove a series of values was extracted from a Merkle Patricia Trie. -use std::iter; - +use super::ports::input::Extractable; use alloy::{ consensus::Header, primitives::{keccak256, B256}, @@ -9,16 +8,17 @@ use alloy::{ use anyhow::{anyhow, Result}; use mp2_common::eth::Rlpable; use ryhope::storage::updatetree::UpdateTree; +use std::iter; /// Given a list MPT proofs, a block [`Header`] and an epoch, this function produces an [`UpdateTree`] /// for proving correct value extraction. The leaves of the tree represent proofs that have no dependencies. -pub fn produce_update_tree( - paths: &[Vec>], +pub fn produce_update_tree( + data: &[E], block_header: &Header, epoch: i64, ) -> Result> { // First check that paths is not empty, even if there are no relevant paths we still prove emptiness of the Block. 
- if paths.is_empty() { + if data.is_empty() { return Err(anyhow!("No paths were provided, there is nothing to prove")); } @@ -31,14 +31,14 @@ pub fn produce_update_tree( // All of the paths should be from root to leaf, so we append the hash of the trie root and the block as the first element, this corresponds to the final extraction proof // we also make a two element path consisting of this final key and the block hash. - let final_key = keccak256([keccak256(&paths[0][0]).0, block_hash].concat()); + let final_key = keccak256([keccak256(&data[0].to_path()[0]).0, block_hash].concat()); // Convert the paths into their keys using keccak - let key_paths = paths + let key_paths = data .iter() - .map(|path| { + .map(|input| { iter::once(final_key) - .chain(path.iter().map(keccak256)) + .chain(input.to_path().iter().map(keccak256)) .collect::>() }) .chain(vec![vec![final_key, block_proof_key]]) @@ -66,14 +66,10 @@ pub mod tests { let block_info = build_test_data().await; - let paths = receipt_proofs - .iter() - .map(|info| info.mpt_proof.clone()) - .collect::>>>(); let header = block_info.block.header.inner.clone(); let epoch: i64 = 21362445; - let update_tree = produce_update_tree(&paths, &header, epoch)?; + let update_tree = produce_update_tree(&receipt_proofs, &header, epoch)?; // The root of the update tree should be the ahsh of the block hash and the root of the receipt trie. let block_hash: [u8; 32] = header @@ -106,29 +102,32 @@ pub mod tests { // We iterate up the paths from leaf to root to check that each node has the correct parent. 
         // First we return an iterator over all the nodes
         let mut all_nodes = update_tree.descendants(0);
-        paths.iter().try_for_each(|path| {
-            // Find the intial state we need for the scan
-            let leaf_key = keccak256(path.last().ok_or(anyhow!("Path was empty!"))?);
-            let leaf_node_index = all_nodes
-                .find(|index| *update_tree.node(*index).key() == leaf_key)
-                .ok_or(anyhow!("Leaf key did not exist in tree"))?;
-            // Since in the `produce_update_tree` function we append a node to each of these paths at the start if all the nodes are included the following shoult return `0usize`.
-            let final_parent = path
-                .iter()
-                .rev()
-                .try_fold(leaf_node_index, |state, node| {
-                    let tree_node = update_tree.node(state);
-                    if *tree_node.key() == keccak256(node) {
-                        tree_node.parent()
-                    } else {
-                        None
-                    }
-                })
-                .ok_or(anyhow!("final parent was a None value"))?;
-
-            assert_eq!(final_parent, 0usize);
-            Ok(())
-        })
+        receipt_proofs
+            .iter()
+            .map(Extractable::to_path)
+            .try_for_each(|path| {
+                // Find the initial state we need for the scan
+                let leaf_key = keccak256(path.last().ok_or(anyhow!("Path was empty!"))?);
+                let leaf_node_index = all_nodes
+                    .find(|index| *update_tree.node(*index).key() == leaf_key)
+                    .ok_or(anyhow!("Leaf key did not exist in tree"))?;
+                // Since in the `produce_update_tree` function we append a node to each of these paths at the start if all the nodes are included the following should return `0usize`.
+                let final_parent = path
+                    .iter()
+                    .rev()
+                    .try_fold(leaf_node_index, |state, node| {
+                        let tree_node = update_tree.node(state);
+                        if *tree_node.key() == keccak256(node) {
+                            tree_node.parent()
+                        } else {
+                            None
+                        }
+                    })
+                    .ok_or(anyhow!("final parent was a None value"))?;
+
+                assert_eq!(final_parent, 0usize);
+                Ok(())
+            })
     }

     /// Function that fetches a block together with its transaction trie and receipt trie for testing purposes.