From 0799efcf3628e057f97b3f716dbc24c7424f0432 Mon Sep 17 00:00:00 2001
From: Diego Nehab <1635557+diegonehab@users.noreply.github.com>
Date: Tue, 25 Jul 2023 18:20:39 -0300
Subject: [PATCH] refactor: simplify machine hash computation in tests

---
 src/Makefile               |   4 +-
 src/back-merkle-tree.cpp   |  48 ++++-
 src/back-merkle-tree.h     |  30 ++-
 src/test-machine-c-api.cpp |  11 +-
 src/test-utils.h           | 413 +++++++------------------------
 src/tests/machine-bind.lua |  15 +-
 src/tests/machine-test.lua |  84 +++-----
 src/tests/util.lua         | 383 +++++++++++-----------------------
 8 files changed, 309 insertions(+), 679 deletions(-)

diff --git a/src/Makefile b/src/Makefile
index e08f55f12..7779f6155 100644
--- a/src/Makefile
+++ b/src/Makefile
@@ -544,7 +544,9 @@ TEST_MERKLE_TREE_HASH_OBJS:= \
 	test-merkle-tree-hash.o
 
 TEST_MACHINE_C_API_OBJS:= \
-	test-machine-c-api.o
+	test-machine-c-api.o \
+	back-merkle-tree.o \
+	pristine-merkle-tree.o
 
 PROTO_OBJS:= \
 	$(PROTOBUF_GEN_OBJS) \

diff --git a/src/back-merkle-tree.cpp b/src/back-merkle-tree.cpp
index 4c42f5c5b..80afd417b 100644
--- a/src/back-merkle-tree.cpp
+++ b/src/back-merkle-tree.cpp
@@ -15,6 +15,8 @@
 //
 
 #include "back-merkle-tree.h"
+#include
+#include
 #include
 
 /// \file
@@ -44,14 +46,14 @@ back_merkle_tree::back_merkle_tree(int log2_root_size, int log2_leaf_size, int l
     if (log2_word_size > log2_leaf_size) {
         throw std::out_of_range{"log2_word_size is greater than log2_word_size"};
     }
-    if (log2_root_size >= std::numeric_limits<address_type>::digits) {
+    if (log2_root_size - m_log2_leaf_size >= std::numeric_limits<address_type>::digits) {
        throw std::out_of_range{"tree is too large for address type"};
     }
 }
 
-void back_merkle_tree::push_back(const hash_type &leaf_hash) {
+void back_merkle_tree::push_back(const hash_type &new_leaf_hash) {
     hasher_type h;
-    hash_type right = leaf_hash;
+    hash_type right = new_leaf_hash;
     if (m_leaf_count >= m_max_leaves) {
         throw std::out_of_range{"too many leaves"};
     }
@@ -68,6 +70,46 @@ void back_merkle_tree::push_back(const hash_type &leaf_hash) {
     ++m_leaf_count;
 }
 
+void back_merkle_tree::pad_back(uint64_t new_leaf_count) {
+    hasher_type h;
+    if (new_leaf_count > m_max_leaves || m_leaf_count + new_leaf_count > m_max_leaves) {
+        throw std::invalid_argument("too many leaves");
+    }
+    const int depth = m_log2_root_size - m_log2_leaf_size;
+    for (int j = 0; j <= depth; ++j) {
+        const uint64_t j_span = address_type{1} << j;
+        if (j_span > new_leaf_count) {
+            break;
+        }
+        // is our smallest tree at depth j?
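+        // (i.e., is bit j of m_leaf_count set, meaning m_context[j] already
+        // holds the root of a complete subtree of 2^j leaves?)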
+        if ((m_leaf_count & j_span) != 0) {
+            // if so, we can add 2^j pristine leaves directly
+            auto right = m_pristine_hashes.get_hash(m_log2_leaf_size + j);
+            for (int i = j; i <= depth; ++i) {
+                const uint64_t i_span = address_type{1} << i;
+                if ((m_leaf_count & i_span) != 0) {
+                    const auto &left = m_context[i];
+                    get_concat_hash(h, left, right, right);
+                } else {
+                    m_context[i] = right;
+                    break;
+                }
+            }
+            new_leaf_count = new_leaf_count - j_span;
+            m_leaf_count = m_leaf_count + j_span;
+        }
+    }
+    // now add the rest of the padding directly to the context
+    for (int i = 0; i <= depth; ++i) {
+        const uint64_t i_span = address_type{1} << i;
+        if ((new_leaf_count & i_span) != 0) {
+            m_context[i] = m_pristine_hashes.get_hash(m_log2_leaf_size + i);
+            new_leaf_count = new_leaf_count - i_span;
+            m_leaf_count = m_leaf_count + i_span;
+        }
+    }
+}
+
 back_merkle_tree::hash_type back_merkle_tree::get_root_hash(void) const {
     hasher_type h;
     assert(m_leaf_count <= m_max_leaves);

diff --git a/src/back-merkle-tree.h b/src/back-merkle-tree.h
index 8a3ffe5c0..c416f7621 100644
--- a/src/back-merkle-tree.h
+++ b/src/back-merkle-tree.h
@@ -51,15 +51,15 @@ class back_merkle_tree {
     /// \brief Constructor
     /// \param log2_root_size Log2 of root node
     /// \param log2_leaf_size Log2 of leaf node
-    /// \param log2_word_size Log2 of word
+    /// \param log2_word_size Log2 of word node
    back_merkle_tree(int log2_root_size, int log2_leaf_size, int log2_word_size);
 
     /// \brief Appends a new hash to the tree
-    /// \param hash Hash of leaf data
+    /// \param new_leaf_hash Hash of new leaf data
     /// \details
     /// Consider the tree down to the leaf level.
-    /// The tree is only complete after 2^(log2_root_size-log2_leaf_size) leaves
-    /// have been added.
+    /// The tree is only complete after 2^(log2_root_size-log2_leaf_size)
+    /// leaves have been added.
     /// Before that, when leaf_count leaves have been added, we assume the rest
     /// of the leaves are filled with zeros (i.e., they are pristine).
     /// The trick is that we do not need to store the hashes of all leaf_count
@@ -85,7 +85,27 @@
     /// If the bit is not set, we simply store context[i] = right and break
     /// In other words, we can update the context in
     /// log time (log2_root_size-log2_leaf_size)
-    void push_back(const hash_type &leaf_hash);
+    void push_back(const hash_type &new_leaf_hash);
+
+    /// \brief Appends a number of padding hashes to the tree
+    /// \param new_leaf_count Number of padding hashes to append
+    /// \details
+    /// Recall that a bit i set in leaf_count represents a complete subtree
+    /// of size 2^i for which we have a hash in context[i].
+    /// The remaining entries in the context are unused.
+    /// The base case is when the span of the least significant bit set in
+    /// leaf_count is bigger than new_leaf_count.
+    /// We can simply add to context[j] a pristine subtree of size 2^j
+    /// for each bit j set in new_leaf_count.
+    /// No used entry in the context will be overwritten.
+    /// We can then simply add new_leaf_count to leaf_count and we are done.
+    /// In the general case, the span 2^i of the least significant bit i set
+    /// in leaf_count is less than or equal to new_leaf_count.
+    /// Here, we add a pristine subtree of size 2^i to the context and
+    /// bubble up.
+    /// We add 2^i to leaf_count and subtract 2^i from new_leaf_count.
+    /// Then we repeat this process until we reach the base case.
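+    ///
+    /// For example (illustrative): with leaf_count = 2 (binary 10), a call
+    /// pad_back(3) first bubbles a pristine subtree of span 2^1 = 2 into the
+    /// context, leaving leaf_count = 4 (binary 100) and new_leaf_count = 1;
+    /// the base case then stores a pristine leaf hash in context[0], for a
+    /// final leaf_count = 5 (binary 101).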
+ void pad_back(uint64_t new_leaf_count); /// \brief Returns the root tree hash /// \returns Root tree hash diff --git a/src/test-machine-c-api.cpp b/src/test-machine-c-api.cpp index ac9e21a0c..2ff68aaf5 100644 --- a/src/test-machine-c-api.cpp +++ b/src/test-machine-c-api.cpp @@ -35,15 +35,15 @@ #define BOOST_FIXTURE_TEST_CASE_NOLINT(...) BOOST_FIXTURE_TEST_CASE(__VA_ARGS__) static hash_type get_verification_root_hash(cm_machine *machine) { - std::vector dump_list{ + std::vector dump_list{{ "0000000000000000--0000000000001000.bin", // shadow state - "0000000000001000--000000000000f000.bin", // dtb "0000000000010000--0000000000001000.bin", // shadow pmas "0000000000020000--0000000000006000.bin", // shadow tlb "0000000002000000--00000000000c0000.bin", // clint "0000000040008000--0000000000001000.bin", // htif + "000000007ff00000--0000000000100000.bin", // dtb "0000000080000000--0000000000100000.bin", // ram - }; + }}; char *err_msg{}; int error_code = cm_dump_pmas(machine, &err_msg); @@ -232,6 +232,7 @@ class incomplete_machine_fixture : public default_machine_fixture { target->uarch.ram.image_filename = new_cstr(source->uarch.ram.image_filename); target->uarch.ram.length = source->uarch.ram.length; + target->uarch.processor = source->uarch.processor; } static void _cleanup_machine_config(cm_machine_config *config) { @@ -1484,11 +1485,11 @@ BOOST_FIXTURE_TEST_CASE_NOLINT(get_initial_config_flash_drive_test, flash_drive_ BOOST_FIXTURE_TEST_CASE_NOLINT(dump_pmas_null_placeholder_test, flash_drive_machine_fixture) { std::array dump_list{ "0000000000000000--0000000000001000.bin", // shadow state - "0000000000001000--000000000000f000.bin", // dtb "0000000000010000--0000000000001000.bin", // shadow pmas "0000000000020000--0000000000006000.bin", // shadow tlb "0000000002000000--00000000000c0000.bin", // clint "0000000040008000--0000000000001000.bin", // htif + "000000007ff00000--0000000000100000.bin", // dtb "0000000080000000--0000000000100000.bin", // ram "0080000000000000--0000000003c00000.bin" // flash drive }; @@ -1505,11 +1506,11 @@ BOOST_FIXTURE_TEST_CASE_NOLINT(dump_pmas_null_placeholder_test, flash_drive_mach BOOST_FIXTURE_TEST_CASE_NOLINT(dump_pmas_basic_test, flash_drive_machine_fixture) { std::array dump_list{ "0000000000000000--0000000000001000.bin", // shadow state - "0000000000001000--000000000000f000.bin", // dtb "0000000000010000--0000000000001000.bin", // shadow pmas "0000000000020000--0000000000006000.bin", // shadow tlb "0000000002000000--00000000000c0000.bin", // clint "0000000040008000--0000000000001000.bin", // htif + "000000007ff00000--0000000000100000.bin", // dtb "0000000080000000--0000000000100000.bin", // ram "0080000000000000--0000000003c00000.bin" // flash drive }; diff --git a/src/test-utils.h b/src/test-utils.h index f18b82fe8..8734e51e0 100644 --- a/src/test-utils.h +++ b/src/test-utils.h @@ -20,282 +20,65 @@ #include #include -#include "i-hasher.h" +#include "back-merkle-tree.h" #include "keccak-256-hasher.h" #include "machine-c-api.h" -#include "machine-merkle-tree.h" #include "pma-constants.h" using hash_type = cartesi::keccak_256_hasher::hash_type; -std::array zero_keccak_hash_table{hash_type{}, hash_type{}, hash_type{}, - hash_type{0x01, 0x1b, 0x4d, 0x03, 0xdd, 0x8c, 0x01, 0xf1, 0x04, 0x91, 0x43, 0xcf, 0x9c, 0x4c, 0x81, 0x7e, 0x4b, - 0x16, 0x7f, 0x1d, 0x1b, 0x83, 0xe5, 0xc6, 0xf0, 0xf1, 0x0d, 0x89, 0xba, 0x1e, 0x7b, 0xce}, - hash_type{0x4d, 0x94, 0x70, 0xa8, 0x21, 0xfb, 0xe9, 0x01, 0x17, 0xec, 0x35, 0x7e, 0x30, 0xba, 0xd9, 0x30, 0x57, - 0x32, 0xfb, 0x19, 0xdd, 
0xf5, 0x4a, 0x07, 0xdd, 0x3e, 0x29, 0xf4, 0x40, 0x61, 0x92, 0x54}, - hash_type{0xae, 0x39, 0xce, 0x85, 0x37, 0xac, 0xa7, 0x5e, 0x2e, 0xff, 0x3e, 0x38, 0xc9, 0x80, 0x11, 0xdf, 0xe9, - 0x34, 0xe7, 0x00, 0xa0, 0x96, 0x77, 0x32, 0xfc, 0x07, 0xb4, 0x30, 0xdd, 0x65, 0x6a, 0x23}, - hash_type{0x3f, 0xc9, 0xa1, 0x5f, 0x5b, 0x48, 0x69, 0xc8, 0x72, 0xf8, 0x10, 0x87, 0xbb, 0x61, 0x04, 0xb7, 0xd6, - 0x3e, 0x6f, 0x9a, 0xb4, 0x7f, 0x2c, 0x43, 0xf3, 0x53, 0x5e, 0xae, 0x71, 0x72, 0xaa, 0x7f}, - hash_type{0x17, 0xd2, 0xdd, 0x61, 0x4c, 0xdd, 0xaa, 0x4d, 0x87, 0x92, 0x76, 0xb1, 0x1e, 0x06, 0x72, 0xc9, 0x56, - 0x00, 0x33, 0xd3, 0xe8, 0x45, 0x3a, 0x1d, 0x04, 0x53, 0x39, 0xd3, 0x4b, 0xa6, 0x01, 0xb9}, - hash_type{0xc3, 0x7b, 0x8b, 0x13, 0xca, 0x95, 0x16, 0x6f, 0xb7, 0xaf, 0x16, 0x98, 0x8a, 0x70, 0xfc, 0xc9, 0x0f, - 0x38, 0xbf, 0x91, 0x26, 0xfd, 0x83, 0x3d, 0xa7, 0x10, 0xa4, 0x7f, 0xb3, 0x7a, 0x55, 0xe6}, - hash_type{0x8e, 0x7a, 0x42, 0x7f, 0xa9, 0x43, 0xd9, 0x96, 0x6b, 0x38, 0x9f, 0x4f, 0x25, 0x71, 0x73, 0x67, 0x60, - 0x90, 0xc6, 0xe9, 0x5f, 0x43, 0xe2, 0xcb, 0x6d, 0x65, 0xf8, 0x75, 0x81, 0x11, 0xe3, 0x09}, - hash_type{0x30, 0xb0, 0xb9, 0xde, 0xb7, 0x3e, 0x15, 0x5c, 0x59, 0x74, 0x0b, 0xac, 0xf1, 0x4a, 0x6f, 0xf0, 0x4b, - 0x64, 0xbb, 0x8e, 0x20, 0x1a, 0x50, 0x64, 0x09, 0xc3, 0xfe, 0x38, 0x1c, 0xa4, 0xea, 0x90}, - hash_type{0xcd, 0x5d, 0xea, 0xc7, 0x29, 0xd0, 0xfd, 0xac, 0xcc, 0x44, 0x1d, 0x09, 0xd7, 0x32, 0x5f, 0x41, 0x58, - 0x6b, 0xa1, 0x3c, 0x80, 0x1b, 0x7e, 0xcc, 0xae, 0x0f, 0x95, 0xd8, 0xf3, 0x93, 0x3e, 0xfe}, - hash_type{0xd8, 0xb9, 0x6e, 0x5b, 0x7f, 0x6f, 0x45, 0x9e, 0x9c, 0xb6, 0xa2, 0xf4, 0x1b, 0xf2, 0x76, 0xc7, 0xb8, - 0x5c, 0x10, 0xcd, 0x46, 0x62, 0xc0, 0x4c, 0xbb, 0xb3, 0x65, 0x43, 0x47, 0x26, 0xc0, 0xa0}, - hash_type{0xc9, 0x69, 0x53, 0x93, 0x02, 0x7f, 0xb1, 0x06, 0xa8, 0x15, 0x31, 0x09, 0xac, 0x51, 0x62, 0x88, 0xa8, - 0x8b, 0x28, 0xa9, 0x38, 0x17, 0x89, 0x94, 0x60, 0xd6, 0x31, 0x0b, 0x71, 0xcf, 0x1e, 0x61}, - hash_type{0x63, 0xe8, 0x80, 0x6f, 0xa0, 0xd4, 0xb1, 0x97, 0xa2, 0x59, 0xe8, 0xc3, 0xac, 0x28, 0x86, 0x42, 0x68, - 0x15, 0x9d, 0x0a, 0xc8, 0x5f, 0x85, 0x81, 0xca, 0x28, 0xfa, 0x7d, 0x2c, 0x0c, 0x03, 0xeb}, - hash_type{0x91, 0xe3, 0xee, 0xe5, 0xca, 0x7a, 0x3d, 0xa2, 0xb3, 0x05, 0x3c, 0x97, 0x70, 0xdb, 0x73, 0x59, 0x9f, - 0xb1, 0x49, 0xf6, 0x20, 0xe3, 0xfa, 0xce, 0xf9, 0x5e, 0x94, 0x7c, 0x0e, 0xe8, 0x60, 0xb7}, - hash_type{0x21, 0x22, 0xe3, 0x1e, 0x4b, 0xbd, 0x2b, 0x7c, 0x78, 0x3d, 0x79, 0xcc, 0x30, 0xf6, 0x0c, 0x62, 0x38, - 0x65, 0x1d, 0xa7, 0xf0, 0x72, 0x6f, 0x76, 0x7d, 0x22, 0x74, 0x72, 0x64, 0xfd, 0xb0, 0x46}, - hash_type{0xf7, 0x54, 0x9f, 0x26, 0xcc, 0x70, 0xed, 0x5e, 0x18, 0xba, 0xeb, 0x6c, 0x81, 0xbb, 0x06, 0x25, 0xcb, - 0x95, 0xbb, 0x40, 0x19, 0xae, 0xec, 0xd4, 0x07, 0x74, 0xee, 0x87, 0xae, 0x29, 0xec, 0x51}, - hash_type{0x7a, 0x71, 0xf6, 0xee, 0x26, 0x4c, 0x5d, 0x76, 0x13, 0x79, 0xb3, 0xd7, 0xd6, 0x17, 0xca, 0x83, 0x67, - 0x73, 0x74, 0xb4, 0x9d, 0x10, 0xae, 0xc5, 0x05, 0x05, 0xac, 0x08, 0x74, 0x08, 0xca, 0x89}, - hash_type{0x2b, 0x57, 0x3c, 0x26, 0x7a, 0x71, 0x2a, 0x52, 0xe1, 0xd0, 0x64, 0x21, 0xfe, 0x27, 0x6a, 0x03, 0xef, - 0xb1, 0x88, 0x9f, 0x33, 0x72, 0x01, 0x11, 0x0f, 0xdc, 0x32, 0xa8, 0x1f, 0x8e, 0x15, 0x24}, - hash_type{0x99, 0xaf, 0x66, 0x58, 0x35, 0xaa, 0xbf, 0xdc, 0x67, 0x40, 0xc7, 0xe2, 0xc3, 0x79, 0x1a, 0x31, 0xc3, - 0xcd, 0xc9, 0xf5, 0xab, 0x96, 0x2f, 0x68, 0x1b, 0x12, 0xfc, 0x09, 0x28, 0x16, 0xa6, 0x2f}, - hash_type{0x27, 0xd8, 0x60, 0x25, 0x59, 0x9a, 0x41, 0x23, 0x38, 0x48, 0x70, 0x2f, 0x0c, 0xfc, 0x04, 0x37, 0xb4, - 0x45, 0x68, 0x2d, 0xf5, 0x11, 0x47, 0xa6, 0x32, 0xa0, 0xa0, 
0x83, 0xd2, 0xd3, 0x8b, 0x5e}, - hash_type{0x13, 0xe4, 0x66, 0xa8, 0x93, 0x5a, 0xff, 0xf5, 0x8b, 0xb5, 0x33, 0xb3, 0xef, 0x5d, 0x27, 0xfb, 0xa6, - 0x3e, 0xe6, 0xb0, 0xfd, 0x9e, 0x67, 0xff, 0x20, 0xaf, 0x9d, 0x50, 0xde, 0xee, 0x3f, 0x8b}, - hash_type{0xf0, 0x65, 0xec, 0x22, 0x0c, 0x1f, 0xd4, 0xba, 0x57, 0xe3, 0x41, 0x26, 0x1d, 0x55, 0x99, 0x7f, 0x85, - 0xd6, 0x6d, 0x32, 0x15, 0x25, 0x26, 0x73, 0x68, 0x72, 0x69, 0x3d, 0x2b, 0x43, 0x7a, 0x23}, - hash_type{0x3e, 0x23, 0x37, 0xb7, 0x15, 0xf6, 0xac, 0x9a, 0x6a, 0x27, 0x26, 0x22, 0xfd, 0xc2, 0xd6, 0x7f, 0xcf, - 0xe1, 0xda, 0x34, 0x59, 0xf8, 0xda, 0xb4, 0xed, 0x7e, 0x40, 0xa6, 0x57, 0xa5, 0x4c, 0x36}, - hash_type{0x76, 0x6c, 0x5e, 0x8a, 0xc9, 0xa8, 0x8b, 0x35, 0xb0, 0x5c, 0x34, 0x74, 0x7e, 0x65, 0x07, 0xf6, 0xb0, - 0x44, 0xab, 0x66, 0x18, 0x0d, 0xc7, 0x6a, 0xc1, 0xa6, 0x96, 0xde, 0x03, 0x18, 0x95, 0x93}, - hash_type{0xfe, 0xdc, 0x0d, 0x0d, 0xbb, 0xd8, 0x55, 0xc8, 0xea, 0xd6, 0x73, 0x54, 0x48, 0x99, 0xb0, 0x96, 0x0e, - 0x4a, 0x5a, 0x7c, 0xa4, 0x3b, 0x4e, 0xf9, 0x0a, 0xfe, 0x60, 0x7d, 0xe7, 0x69, 0x8c, 0xae}, - hash_type{0xfd, 0xc2, 0x42, 0x78, 0x8f, 0x65, 0x4b, 0x57, 0xa4, 0xfb, 0x32, 0xa7, 0x1b, 0x33, 0x5e, 0xf6, 0xff, - 0x9a, 0x4c, 0xc1, 0x18, 0xb2, 0x82, 0xb5, 0x3b, 0xdd, 0x6d, 0x61, 0x92, 0xb7, 0xa8, 0x2c}, - hash_type{0x3c, 0x51, 0x26, 0xb9, 0xc7, 0xe3, 0x3c, 0x8e, 0x5a, 0x5a, 0xc9, 0x73, 0x8b, 0x8b, 0xd3, 0x12, 0x47, - 0xfb, 0x74, 0x02, 0x05, 0x4f, 0x97, 0xb5, 0x73, 0xe8, 0xab, 0xb9, 0xfa, 0xad, 0x21, 0x9f}, - hash_type{0x4f, 0xd0, 0x85, 0xac, 0xea, 0xa7, 0xf5, 0x42, 0xd7, 0x87, 0xee, 0x41, 0x96, 0xd3, 0x65, 0xf3, 0xcc, - 0x56, 0x6e, 0x7b, 0xbc, 0xfb, 0xfd, 0x45, 0x12, 0x30, 0xc4, 0x8d, 0x80, 0x4c, 0x01, 0x7d}, - hash_type{0x21, 0xe2, 0xd8, 0xfa, 0x91, 0x4e, 0x25, 0x59, 0xbb, 0x72, 0xbf, 0x0a, 0xb7, 0x8c, 0x8a, 0xb9, 0x2f, - 0x00, 0xef, 0x0d, 0x0d, 0x57, 0x6e, 0xcc, 0xdd, 0x48, 0x6b, 0x64, 0x13, 0x8a, 0x41, 0x72}, - hash_type{0x67, 0x48, 0x57, 0xe5, 0x43, 0xd1, 0xd5, 0xb6, 0x39, 0x05, 0x8d, 0xd9, 0x08, 0x18, 0x65, 0x97, 0xe3, - 0x66, 0xad, 0x5f, 0x3d, 0x9c, 0x7c, 0xea, 0xff, 0x44, 0xd0, 0x4d, 0x15, 0x50, 0xb8, 0xd3}, - hash_type{0x3a, 0xbc, 0x75, 0x1d, 0xf0, 0x74, 0x37, 0x83, 0x4b, 0xa5, 0xac, 0xb3, 0x23, 0x28, 0xa3, 0x96, 0x99, - 0x4a, 0xeb, 0xb3, 0xc4, 0x0f, 0x75, 0x9c, 0x2d, 0x6d, 0x7a, 0x3c, 0xb5, 0x37, 0x7e, 0x55}, - hash_type{0xd5, 0xd2, 0x18, 0xef, 0x5a, 0x29, 0x6d, 0xda, 0x8d, 0xdc, 0x35, 0x5f, 0x3f, 0x50, 0xc3, 0xd0, 0xb6, - 0x60, 0xa5, 0x1d, 0xfa, 0x4d, 0x98, 0xa6, 0xa5, 0xa3, 0x35, 0x64, 0x55, 0x6c, 0xf8, 0x3c}, - hash_type{0x13, 0x73, 0xa8, 0x14, 0x64, 0x1d, 0x6a, 0x1d, 0xce, 0xf9, 0x7b, 0x88, 0x3f, 0xee, 0x61, 0xbb, 0x84, - 0xfe, 0x60, 0xa3, 0x40, 0x93, 0x40, 0x21, 0x7e, 0x62, 0x9c, 0xc7, 0xe4, 0xdc, 0xc9, 0x3b}, - hash_type{0x85, 0xd8, 0x82, 0x09, 0x21, 0xff, 0x58, 0x26, 0x14, 0x8b, 0x60, 0xe6, 0x93, 0x9a, 0xcd, 0x78, 0x38, - 0xe1, 0xd7, 0xf2, 0x05, 0x62, 0xbf, 0xf8, 0xee, 0x4b, 0x5e, 0xc4, 0xa0, 0x5a, 0xd9, 0x97}, - hash_type{0xa5, 0x7b, 0x97, 0x96, 0xfd, 0xcb, 0x2e, 0xda, 0x87, 0x88, 0x3c, 0x26, 0x40, 0xb0, 0x72, 0xb1, 0x40, - 0xb9, 0x46, 0xbf, 0xdf, 0x65, 0x75, 0xca, 0xcc, 0x06, 0x6f, 0xda, 0xe0, 0x4f, 0x69, 0x51}, - hash_type{0xe6, 0x36, 0x24, 0xcb, 0xd3, 0x16, 0xa6, 0x77, 0xca, 0xd5, 0x29, 0xbb, 0xe4, 0xe9, 0x7b, 0x91, 0x44, - 0xe4, 0xbc, 0x06, 0xc4, 0xaf, 0xd1, 0xde, 0x55, 0xdd, 0x3e, 0x11, 0x75, 0xf9, 0x04, 0x23}, - hash_type{0x84, 0x7a, 0x23, 0x0d, 0x34, 0xdf, 0xb7, 0x1e, 0xd5, 0x6f, 0x29, 0x65, 0xa7, 0xf6, 0xc7, 0x2e, 0x6a, - 0xa3, 0x3c, 0x24, 0xc3, 0x03, 0xfd, 0x67, 0x74, 0x5d, 0x63, 0x26, 0x56, 0xc5, 0xef, 0x90}, - 
hash_type{0xbe, 0xc8, 0x0f, 0x4f, 0x5d, 0x1d, 0xaa, 0x25, 0x19, 0x88, 0x82, 0x6c, 0xef, 0x37, 0x5c, 0x81, 0xc3, - 0x6b, 0xf4, 0x57, 0xe0, 0x96, 0x87, 0x05, 0x6f, 0x92, 0x46, 0x77, 0xcb, 0x0b, 0xcc, 0xf9}, - hash_type{0x8d, 0xff, 0x81, 0xe0, 0x14, 0xce, 0x25, 0xf2, 0xd1, 0x32, 0x49, 0x79, 0x23, 0xe2, 0x67, 0x36, 0x39, - 0x63, 0xcd, 0xf4, 0x30, 0x2c, 0x50, 0x49, 0xd6, 0x31, 0x31, 0xdc, 0x03, 0xfd, 0x95, 0xf6}, - hash_type{0x5d, 0x8b, 0x6a, 0xa5, 0x93, 0x4f, 0x81, 0x72, 0x52, 0xc0, 0x28, 0xc9, 0x0f, 0x56, 0xd4, 0x13, 0xb9, - 0xd5, 0xd1, 0x0d, 0x89, 0x79, 0x07, 0x07, 0xda, 0xe2, 0xfa, 0xbb, 0x24, 0x9f, 0x64, 0x99}, - hash_type{0x29, 0x92, 0x7c, 0x21, 0xdd, 0x71, 0xe3, 0xf6, 0x56, 0x82, 0x6d, 0xe5, 0x45, 0x1c, 0x5d, 0xa3, 0x75, - 0xaa, 0xde, 0xcb, 0xd5, 0x9d, 0x5e, 0xbf, 0x3a, 0x31, 0xfa, 0xe6, 0x5a, 0xc1, 0xb3, 0x16}, - hash_type{0xa1, 0x61, 0x1f, 0x1b, 0x27, 0x6b, 0x26, 0x53, 0x0f, 0x58, 0xd7, 0x24, 0x7d, 0xf4, 0x59, 0xce, 0x1f, - 0x86, 0xdb, 0x1d, 0x73, 0x4f, 0x6f, 0x81, 0x19, 0x32, 0xf0, 0x42, 0xce, 0xe4, 0x5d, 0x0e}, - hash_type{0x45, 0x53, 0x06, 0xd0, 0x10, 0x81, 0xbc, 0x33, 0x84, 0xf8, 0x2c, 0x5f, 0xb2, 0xaa, 0xca, 0xa1, 0x9d, - 0x89, 0xcd, 0xfa, 0x46, 0xcc, 0x91, 0x6e, 0xac, 0x61, 0x12, 0x14, 0x75, 0xba, 0x2e, 0x61}, - hash_type{0x91, 0xb4, 0xfe, 0xec, 0xbe, 0x17, 0x89, 0x71, 0x70, 0x21, 0xa1, 0x58, 0xac, 0xe5, 0xd0, 0x67, 0x44, - 0xb4, 0x0f, 0x55, 0x10, 0x76, 0xb6, 0x7c, 0xd6, 0x3a, 0xf6, 0x00, 0x07, 0xf8, 0xc9, 0x98}, - hash_type{0x76, 0xe1, 0x42, 0x48, 0x83, 0xa4, 0x5e, 0xc4, 0x9d, 0x49, 0x7d, 0xda, 0xf8, 0x08, 0xa5, 0x52, 0x1c, - 0xa7, 0x4a, 0x99, 0x9a, 0xb0, 0xb3, 0xc7, 0xaa, 0x9c, 0x80, 0xf8, 0x5e, 0x93, 0x97, 0x7e}, - hash_type{0xc6, 0x1c, 0xe6, 0x8b, 0x20, 0x30, 0x7a, 0x1a, 0x81, 0xf7, 0x1c, 0xa6, 0x45, 0xb5, 0x68, 0xfc, 0xd3, - 0x19, 0xcc, 0xbb, 0x5f, 0x65, 0x1e, 0x87, 0xb7, 0x07, 0xd3, 0x7c, 0x39, 0xe1, 0x5f, 0x94}, - hash_type{0x5e, 0xa6, 0x9e, 0x2f, 0x7c, 0x7d, 0x2c, 0xcc, 0x85, 0xb7, 0xe6, 0x54, 0xc0, 0x7e, 0x96, 0xf0, 0x63, - 0x6a, 0xe4, 0x04, 0x4f, 0xe0, 0xe3, 0x85, 0x90, 0xb4, 0x31, 0x79, 0x5a, 0xd0, 0xf8, 0x64}, - hash_type{0x7b, 0xdd, 0x61, 0x37, 0x13, 0xad, 0xa4, 0x93, 0xcc, 0x17, 0xef, 0xd3, 0x13, 0x20, 0x63, 0x80, 0xe6, - 0xa6, 0x85, 0xb8, 0x19, 0x84, 0x75, 0xbb, 0xd0, 0x21, 0xc6, 0xe9, 0xd9, 0x4d, 0xaa, 0xb2}, - hash_type{0x21, 0x49, 0x47, 0x12, 0x75, 0x06, 0x07, 0x3e, 0x44, 0xd5, 0x40, 0x8b, 0xa1, 0x66, 0xc5, 0x12, 0xa0, - 0xb8, 0x68, 0x05, 0xd0, 0x7f, 0x5a, 0x44, 0xd3, 0xc4, 0x17, 0x06, 0xbe, 0x2b, 0xc1, 0x5e}, - hash_type{0x71, 0x2e, 0x55, 0x80, 0x52, 0x48, 0xb9, 0x2e, 0x86, 0x77, 0xd9, 0x0f, 0x6d, 0x28, 0x4d, 0x1d, 0x6f, - 0xfa, 0xff, 0x2c, 0x43, 0x06, 0x57, 0x04, 0x2a, 0x0e, 0x82, 0x62, 0x4f, 0xa3, 0x71, 0x7b}, - hash_type{0x06, 0xcc, 0x0a, 0x6f, 0xd1, 0x22, 0x30, 0xea, 0x58, 0x6d, 0xae, 0x83, 0x01, 0x9f, 0xb9, 0xe0, 0x60, - 0x34, 0xed, 0x28, 0x03, 0xc9, 0x8d, 0x55, 0x4b, 0x93, 0xc9, 0xa5, 0x23, 0x48, 0xca, 0xff}, - hash_type{0xf7, 0x5c, 0x40, 0x17, 0x4a, 0x91, 0xf9, 0xae, 0x6b, 0x86, 0x47, 0x85, 0x4a, 0x15, 0x60, 0x29, 0xf0, - 0xb8, 0x8b, 0x83, 0x31, 0x66, 0x63, 0xce, 0x57, 0x4a, 0x49, 0x78, 0x27, 0x7b, 0xb6, 0xbb}, - hash_type{0x27, 0xa3, 0x10, 0x85, 0x63, 0x4b, 0x6e, 0xc7, 0x88, 0x64, 0xb6, 0xd8, 0x20, 0x1c, 0x7e, 0x93, 0x90, - 0x3d, 0x75, 0x81, 0x50, 0x67, 0xe3, 0x78, 0x28, 0x9a, 0x3d, 0x07, 0x2a, 0xe1, 0x72, 0xda}, - hash_type{0xfa, 0x6a, 0x45, 0x24, 0x70, 0xf8, 0xd6, 0x45, 0xbe, 0xbf, 0xad, 0x97, 0x79, 0x59, 0x4f, 0xc0, 0x78, - 0x4b, 0xb7, 0x64, 0xa2, 0x2e, 0x3a, 0x81, 0x81, 0xd9, 0x3d, 0xb7, 0xbf, 0x97, 0x89, 0x3c}, - hash_type{0x41, 0x42, 0x17, 0xa6, 
0x18, 0xcc, 0xb1, 0x4c, 0xaa, 0x9e, 0x92, 0xe8, 0xc6, 0x16, 0x73, 0xaf, 0xc9, - 0x58, 0x36, 0x62, 0xe8, 0x12, 0xad, 0xba, 0x1f, 0x87, 0xa9, 0xc6, 0x82, 0x02, 0xd6, 0x0e}, - hash_type{0x90, 0x9e, 0xfa, 0xb4, 0x3c, 0x42, 0xc0, 0xcb, 0x00, 0x69, 0x5f, 0xc7, 0xf1, 0xff, 0xe6, 0x7c, 0x75, - 0xca, 0x89, 0x4c, 0x3c, 0x51, 0xe1, 0xe5, 0xe7, 0x31, 0x36, 0x01, 0x99, 0xe6, 0x00, 0xf6}, - hash_type{0xce, 0xd9, 0xa8, 0x7b, 0x2a, 0x6a, 0x87, 0xe7, 0x0b, 0xf2, 0x51, 0xbb, 0x50, 0x75, 0xab, 0x22, 0x21, - 0x38, 0x28, 0x81, 0x64, 0xb2, 0xed, 0xa7, 0x27, 0x51, 0x5e, 0xa7, 0xde, 0x12, 0xe2, 0x49}, - hash_type{0x6d, 0x4f, 0xe4, 0x2e, 0xa8, 0xd1, 0xa1, 0x20, 0xc0, 0x3c, 0xf9, 0xc5, 0x06, 0x22, 0xc2, 0xaf, 0xe4, - 0xac, 0xb0, 0xda, 0xd9, 0x8f, 0xd6, 0x2d, 0x07, 0xab, 0x4e, 0x82, 0x8a, 0x94, 0x49, 0x5f}, - hash_type{0x6d, 0x1a, 0xb9, 0x73, 0x98, 0x2c, 0x7c, 0xcb, 0xe6, 0xc1, 0xfa, 0xe0, 0x27, 0x88, 0xe4, 0x42, 0x2a, - 0xe2, 0x22, 0x82, 0xfa, 0x49, 0xcb, 0xdb, 0x04, 0xba, 0x54, 0xa7, 0xa2, 0x38, 0xc6, 0xfc}, - hash_type{0x41, 0x18, 0x74, 0x51, 0x38, 0x34, 0x60, 0x76, 0x2c, 0x06, 0xd1, 0xc8, 0xa7, 0x2b, 0x9c, 0xd7, 0x18, - 0x86, 0x6a, 0xd4, 0xb6, 0x89, 0xe1, 0x0c, 0x9a, 0x8c, 0x38, 0xfe, 0x5e, 0xf0, 0x45, 0xbd}, - hash_type{0x78, 0x5b, 0x01, 0xe9, 0x80, 0xfc, 0x82, 0xc7, 0xe3, 0x53, 0x2c, 0xe8, 0x18, 0x76, 0xb7, 0x78, 0xdd, - 0x9f, 0x1c, 0xee, 0xba, 0x44, 0x78, 0xe8, 0x64, 0x11, 0xfb, 0x6f, 0xdd, 0x79, 0x06, 0x83}, - hash_type{0x91, 0x6c, 0xa8, 0x32, 0x59, 0x24, 0x85, 0x09, 0x36, 0x44, 0xe8, 0x76, 0x0c, 0xd7, 0xb4, 0xc0, 0x1d, - 0xba, 0x1c, 0xcc, 0x82, 0xb6, 0x61, 0xbf, 0x13, 0xf0, 0xe3, 0xf3, 0x4a, 0xcd, 0x6b, 0x88}}; - -#define LOG2_WORD_SIZE 3 - -struct incremental_merkle_tree_of_pages { - std::vector m_tree; - int m_page_log2_size = 0; - int m_tree_log2_size = 0; - uint64_t m_page_count = 0; - uint64_t m_max_pages = 0; - - incremental_merkle_tree_of_pages(int page_log2_size, int tree_log2_size) : - m_page_log2_size(page_log2_size), - m_tree_log2_size(tree_log2_size), - m_page_count(0), - m_max_pages(UINT64_C(1) << (tree_log2_size - page_log2_size)) { - m_tree.resize(2 * m_max_pages); - } - - void add_page(const hash_type &new_page_hash) { - cartesi::keccak_256_hasher h; - hash_type right = new_page_hash; - if (m_page_count >= m_max_pages) { - throw std::out_of_range("Page count must be smaller than max pages"); - } - int depth = m_tree_log2_size - m_page_log2_size; - for (int i = 0; i <= depth; ++i) { - if (m_page_count & (UINT64_C(1) << i)) { - hash_type left = m_tree[i]; - get_concat_hash(h, left, right, right); - } else { - m_tree[i] = right; - break; - } - } - ++m_page_count; - } - - hash_type get_root_hash() const { - if (m_page_count > m_max_pages) { - throw std::out_of_range("Page count must be smaller or equal than max pages"); - } - cartesi::keccak_256_hasher h; - int depth = m_tree_log2_size - m_page_log2_size; - if (m_page_count < m_max_pages) { - hash_type root = zero_keccak_hash_table[m_page_log2_size]; - for (int i = 0; i < depth; ++i) { - if (m_page_count & (UINT64_C(1) << i)) { - auto left = m_tree[i]; - get_concat_hash(h, left, root, root); - } else { - auto right = zero_keccak_hash_table[m_page_log2_size + i]; - get_concat_hash(h, root, right, root); - } - } - return root; - } else { - return m_tree[depth]; - } - } -}; +constexpr int WORD_LOG2_SIZE = 3; +constexpr uint64_t WORD_SIZE = (UINT64_C(1) << WORD_LOG2_SIZE); +constexpr int PAGE_LOG2_SIZE = 12; +constexpr int PAGE_SIZE = (UINT64_C(1) << PAGE_LOG2_SIZE); // Calculate root hash for data buffer of log2_size -hash_type 
calculate_root_hash(const std::vector &data, int log2_size) { - cartesi::keccak_256_hasher h; +namespace detail { +static hash_type merkle_hash(cartesi::keccak_256_hasher &h, const std::string_view &data, int log2_size) { hash_type result; - if (log2_size < LOG2_WORD_SIZE) { - throw std::invalid_argument("Wrong data size"); - } else if (log2_size > LOG2_WORD_SIZE) { + if (log2_size > WORD_LOG2_SIZE) { --log2_size; - uint64_t sz = data.size() / 2 + data.size() % 2; - auto child1 = calculate_root_hash(std::vector{data.cbegin(), data.cbegin() + sz}, log2_size); - auto child2 = calculate_root_hash(std::vector{data.cbegin() + sz, data.cend()}, log2_size); - get_concat_hash(h, child1, child2, result); + auto half_size = data.size() / 2; + auto left = merkle_hash(h, std::string_view{data.data(), half_size}, log2_size); + auto right = merkle_hash(h, std::string_view{data.data() + half_size, half_size}, log2_size); + get_concat_hash(h, left, right, result); } else { - h.add_data(data.data(), data.size()); + h.add_data(reinterpret_cast(data.data()), data.size()); h.end(result); } return result; } +} // namespace detail -// Taking memory region in buffer data_buffer, and occuping data_number_of_pages -// of page size page_log2_size -// calculate merke hash for region of up to tree_log2_size, -// using zero sibling hashes where needed -static hash_type calculate_region_hash(const std::vector &data_buffer, int data_number_of_pages, - int page_log2_size, int tree_log2_size) { - int page_size = 1 << page_log2_size; - auto incremental_tree = incremental_merkle_tree_of_pages(page_log2_size, tree_log2_size); - - for (int i = 0; i < data_number_of_pages; ++i) { - auto current_page_data = - std::vector(data_buffer.begin() + i * page_size, data_buffer.begin() + (i + 1) * page_size); - auto current_page_hash = calculate_root_hash(current_page_data, page_log2_size); - incremental_tree.add_page(current_page_hash); +static hash_type merkle_hash(const std::string_view &data, int log2_size) { + if (log2_size > 63) { + throw std::domain_error("log2_size is too large"); } - return incremental_tree.get_root_hash(); -} - -// Take data hash of some region and extend it with pristine space -// up to tree_log2_size, calculating target hash -static hash_type extend_region_hash(hash_type data_hash, uint64_t data_address, int data_log2_size, - int tree_log2_size) { - auto result_hash = data_hash; - auto result_address = data_address; - for (int n = data_log2_size + 1; n <= tree_log2_size; ++n) { - cartesi::keccak_256_hasher h; - if ((result_address & (UINT64_C(-1) >> (64 - n))) == 0) { - auto child1 = result_hash; - auto child2 = zero_keccak_hash_table[n - 1]; - get_concat_hash(h, child1, child2, result_hash); - } else { - auto child1 = zero_keccak_hash_table[n - 1]; - auto child2 = result_hash; - get_concat_hash(h, child1, child2, result_hash); - result_address = result_address & (~UINT64_C(1) << (n - 1)); - } + if (log2_size < 3) { + throw std::domain_error("log2_size is too small"); } - - return result_hash; + if ((UINT64_C(1) << log2_size) != data.size()) { + throw std::invalid_argument("log2_size does not match data size"); + } + cartesi::keccak_256_hasher h; + return detail::merkle_hash(h, data, log2_size); } -// Taking memory region with starting data_address and log2_data_size -// calculate merke hash for region of up to log2_result_address_space, -// using zero sibling hashes where needed. 
Data_address may not be aligned -// to the beginning of the log2_result_address_space -static hash_type calculate_region_hash_2(uint64_t data_address, const std::vector data_buffer, - int log2_data_size, int log2_result_address_space) { - data_address = data_address & (~UINT64_C(1) << (log2_data_size - 1)); - auto data_hash = calculate_root_hash(data_buffer, log2_data_size); - auto result_hash = data_hash; - auto result_address = data_address; - for (int n = log2_data_size + 1; n <= log2_result_address_space; ++n) { - cartesi::keccak_256_hasher h; - if ((result_address & (UINT64_C(-1) >> (64 - n))) == 0) { - auto child1 = result_hash; - auto child2 = zero_keccak_hash_table[n - 1]; - get_concat_hash(h, child1, child2, result_hash); - } else { - auto child1 = zero_keccak_hash_table[n - 1]; - auto child2 = result_hash; - get_concat_hash(h, child1, child2, result_hash); - result_address = result_address & (~UINT64_C(1) << (n - 1)); - } - } - return result_hash; +// static std::string load_file(const std::string &path) { +// std::ifstream ifs(path, std::ios::binary); +// return std::string{std::istreambuf_iterator{ifs}, {}}; +//} + +static std::string load_file(const std::string &path) { + std::streampos size; + std::ifstream file(path, std::ios::binary); + file.seekg(0, std::ios::end); + size = file.tellg(); + file.seekg(0, std::ios::beg); + std::string data; + data.resize(size); + file.read(data.data(), data.size()); + return data; } static hash_type calculate_proof_root_hash(const cm_merkle_tree_proof *proof) { @@ -318,86 +101,50 @@ static hash_type calculate_proof_root_hash(const cm_merkle_tree_proof *proof) { return hash; } -static std::vector parse_pma_file(const std::string &path) { - std::streampos size; - std::ifstream file(path, std::ios::binary); - file.seekg(0, std::ios::end); - size = file.tellg(); - file.seekg(0, std::ios::beg); - - std::vector data(size); - file.read(reinterpret_cast(&data[0]), size); - return data; -} - static int ceil_log2(uint64_t x) { return static_cast(std::ceil(std::log2(static_cast(x)))); } -static hash_type calculate_emulator_hash(const std::vector &pmas_files) { - assert(pmas_files.size() >= 7); - using namespace cartesi; - cartesi::keccak_256_hasher h; - auto shadow_state = parse_pma_file(pmas_files[0]); - auto dtb = parse_pma_file(pmas_files[1]); - auto shadow_pmas = parse_pma_file(pmas_files[2]); - auto shadow_tlb = parse_pma_file(pmas_files[3]); - auto clint = parse_pma_file(pmas_files[4]); - auto htif = parse_pma_file(pmas_files[5]); - auto ram = parse_pma_file(pmas_files[6]); - std::vector uarch_ram(0); - if (pmas_files.size() >= 8) { - uarch_ram = parse_pma_file(pmas_files[7]); - } - - std::vector shadow_dtb; - shadow_dtb.reserve(shadow_state.size() + dtb.size() + shadow_pmas.size()); - shadow_dtb.insert(shadow_dtb.end(), shadow_state.begin(), shadow_state.end()); - shadow_dtb.insert(shadow_dtb.end(), dtb.begin(), dtb.end()); - shadow_dtb.insert(shadow_dtb.end(), shadow_pmas.begin(), shadow_pmas.end()); - - hash_type shadow_dtb_tlb_space_hash; - hash_type shadow_dtb_tlb_clint_hash; - hash_type left; - hash_type used_space_hash; - - int shadow_dtb_hash_size_log2 = ceil_log2(PMA_SHADOW_STATE_LENGTH + PMA_DTB_LENGTH + PMA_SHADOW_PMAS_LENGTH); - auto shadow_dtb_space_hash = calculate_region_hash(shadow_dtb, - (shadow_dtb.size() + PMA_PAGE_SIZE - 1) / PMA_PAGE_SIZE, PMA_PAGE_SIZE_LOG2, shadow_dtb_hash_size_log2); - shadow_dtb_space_hash = extend_region_hash(shadow_dtb_space_hash, 0, shadow_dtb_hash_size_log2, 17); - - auto tlb_size_log2 = 
ceil_log2(PMA_SHADOW_TLB_LENGTH); - auto tlb_space_hash = calculate_region_hash(shadow_tlb, (shadow_tlb.size() + PMA_PAGE_SIZE - 1) / PMA_PAGE_SIZE, - PMA_PAGE_SIZE_LOG2, tlb_size_log2); - tlb_space_hash = extend_region_hash(tlb_space_hash, PMA_SHADOW_TLB_START, tlb_size_log2, 17); - - get_concat_hash(h, shadow_dtb_space_hash, tlb_space_hash, shadow_dtb_tlb_space_hash); // 18 - shadow_dtb_tlb_space_hash = extend_region_hash(shadow_dtb_tlb_space_hash, 0, 18, 25); - - auto clint_size_log2 = ceil_log2(PMA_CLINT_LENGTH); - auto clint_space_hash = calculate_region_hash(clint, (clint.size() + PMA_PAGE_SIZE - 1) / PMA_PAGE_SIZE, - PMA_PAGE_SIZE_LOG2, clint_size_log2); - clint_space_hash = extend_region_hash(clint_space_hash, PMA_CLINT_START, clint_size_log2, 25); - - get_concat_hash(h, shadow_dtb_tlb_space_hash, clint_space_hash, shadow_dtb_tlb_clint_hash); // 26 - shadow_dtb_tlb_clint_hash = extend_region_hash(shadow_dtb_tlb_clint_hash, 0, 26, 29); - - uint64_t htif_size_log2 = ceil_log2(htif.size()); - auto htif_space_hash = calculate_region_hash_2(PMA_HTIF_START, htif, htif_size_log2, 29); - get_concat_hash(h, shadow_dtb_tlb_clint_hash, htif_space_hash, left); // 30 - - auto uarch_ram_space_hash = zero_keccak_hash_table[30]; - if (uarch_ram.size() > 0) { - auto uarch_ram_size_log2 = ceil_log2(uarch_ram.size()); - uarch_ram_space_hash = calculate_region_hash(uarch_ram, (uarch_ram.size() + PMA_PAGE_SIZE - 1) / PMA_PAGE_SIZE, - PMA_PAGE_SIZE_LOG2, uarch_ram_size_log2); - uarch_ram_space_hash = extend_region_hash(uarch_ram_space_hash, PMA_UARCH_RAM_START, uarch_ram_size_log2, 30); +static hash_type calculate_emulator_hash(const std::vector &pmas_files) { + struct pma_entry { + std::string path; + uint64_t start; + uint64_t length; + std::string data; + }; + std::vector pma_entries; + std::transform(pmas_files.begin(), pmas_files.end(), std::back_inserter(pma_entries), [](const std::string &path) { + uint64_t start; + uint64_t length; + int end = 0; + if (sscanf(path.data(), "%" SCNx64 "--%" SCNx64 ".bin%n", &start, &length, &end) != 2 || + static_cast(path.size()) != end) { + throw std::invalid_argument("PMA filename '" + path + "' does not match '%x--%x.bin'"); + } + if ((length >> PAGE_LOG2_SIZE) << PAGE_LOG2_SIZE != length) { + throw std::invalid_argument("PMA '" + path + "' length not multiple of page length"); + } + if ((start >> PAGE_LOG2_SIZE) << PAGE_LOG2_SIZE != start) { + throw std::invalid_argument("PMA '" + path + "' start not page-aligned"); + } + auto data = load_file(path); + if (data.length() != length) { + throw std::invalid_argument("PMA '" + path + "' length does not match filename"); + } + return pma_entry{path, start, length, std::move(data)}; + }); + std::sort(pma_entries.begin(), pma_entries.end(), + [](const pma_entry &a, const pma_entry &b) { return a.start < b.start; }); + cartesi::back_merkle_tree tree(64, 12, 3); + uint64_t last = 0; + for (const auto &e : pma_entries) { + tree.pad_back((e.start - last) >> PAGE_LOG2_SIZE); + for (uint64_t s = 0; s < e.length; s += PAGE_SIZE) { + std::string_view page{e.data.data() + s, PAGE_SIZE}; + auto page_hash = merkle_hash(page, PAGE_LOG2_SIZE); + tree.push_back(page_hash); + } + last = e.start + e.length; } - get_concat_hash(h, left, uarch_ram_space_hash, left); // 31 - - uint64_t ram_size_log2 = ceil_log2(ram.size()); - auto ram_space_hash = calculate_region_hash_2(PMA_RAM_START, ram, ram_size_log2, 31); - get_concat_hash(h, left, ram_space_hash, used_space_hash); // 32 - - return extend_region_hash(used_space_hash, 0, 32, 64); + 
return tree.get_root_hash(); } diff --git a/src/tests/machine-bind.lua b/src/tests/machine-bind.lua index 7b89ee608..689fb448a 100755 --- a/src/tests/machine-bind.lua +++ b/src/tests/machine-bind.lua @@ -144,6 +144,13 @@ for _, argument in ipairs({ ... }) do end end +local working_dir +do + local f = io.popen("pwd") + working_dir = string.gsub(f:read("a"), "%s*$", "/") + f:close() +end + local SHADOW_BASE = 0x0 local cpu_x_addr = {} @@ -269,15 +276,15 @@ end local pmas_file_names = { "0000000000000000--0000000000001000.bin", -- shadow state - "0000000000001000--000000000000f000.bin", -- dtb "0000000000010000--0000000000001000.bin", -- shadow pmas "0000000000020000--0000000000006000.bin", -- shadow tlb "0000000002000000--00000000000c0000.bin", -- clint "0000000040008000--0000000000001000.bin", -- htif + "000000007ff00000--0000000000100000.bin", -- dtb "0000000080000000--0000000000100000.bin", -- ram "0000000070000000--0000000000010000.bin", -- uarch ram } -local pmas_sizes = { 4096, 61440, 4096, 24576, 786432, 4096, 1048576, 65536, 65536 } +local pmas_sizes = { 4096, 4096, 24576, 786432, 4096, 1048576, 1048576, 65536 } local remote @@ -490,7 +497,7 @@ do_test("should return expected value", function(machine) print("Root hash: ", test_util.tohex(root_hash)) machine:dump_pmas() - local calculated_root_hash = test_util.calculate_emulator_hash(test_path, pmas_file_names, machine) + local calculated_root_hash = test_util.calculate_emulator_hash(working_dir, pmas_file_names, machine) for _, file_name in pairs(pmas_file_names) do os.remove(test_path .. file_name) end @@ -546,7 +553,7 @@ do_test("there should exist dumped files of expected size", function(machine) machine:dump_pmas() for i = 1, #pmas_file_names do - local dumped_file = test_path .. pmas_file_names[i] + local dumped_file = working_dir .. pmas_file_names[i] local fd = assert(io.open(dumped_file, "rb")) local real_file_size = fd:seek("end") fd:close(dumped_file) diff --git a/src/tests/machine-test.lua b/src/tests/machine-test.lua index 51caf6bcc..3756738d2 100755 --- a/src/tests/machine-test.lua +++ b/src/tests/machine-test.lua @@ -33,9 +33,6 @@ local checkin_address local test_path = "./" local cleanup = {} -local linux_image = test_util.images_path .. "linux.bin" -local rootfs_image = test_util.images_path .. "rootfs.ext2" - -- Print help and exit local function help() io.stderr:write(string.format( @@ -124,6 +121,13 @@ for _, argument in ipairs({ ... 
}) do end end +local working_dir +do + local f = io.popen("pwd") + working_dir = string.gsub(f:read("a"), "%s*$", "/") + f:close() +end + local machine_type = assert(arguments[1], "missing machine type") assert( machine_type == "local" or machine_type == "grpc" or machine_type == "jsonrpc", @@ -153,37 +157,33 @@ end local pmas_file_names = { "0000000000000000--0000000000001000.bin", -- shadow state - "0000000000001000--000000000000f000.bin", -- dtb "0000000000010000--0000000000001000.bin", -- shadow pmas "0000000000020000--0000000000006000.bin", -- shadow tlb "0000000002000000--00000000000c0000.bin", -- clint "0000000040008000--0000000000001000.bin", -- htif + "000000007ff00000--0000000000100000.bin", -- dtb "0000000080000000--0000000000100000.bin", -- ram } local pmas_file_names_with_uarch = { "0000000000000000--0000000000001000.bin", -- shadow state - "0000000000001000--000000000000f000.bin", -- dtb "0000000000010000--0000000000001000.bin", -- shadow pmas "0000000000020000--0000000000006000.bin", -- shadow tlb "0000000002000000--00000000000c0000.bin", -- clint "0000000040008000--0000000000001000.bin", -- htif + "000000007ff00000--0000000000100000.bin", -- dtb "0000000080000000--0000000000100000.bin", -- ram "0000000070000000--0000000000100000.bin", -- uarch ram } local remote local function build_machine(type, config) - -- Create new machine - -- Use default config to be max reproducible - local concurrency_update_merkle_tree = 0 config = config or { - processor = {}, ram = { length = 1 << 20 }, } local runtime = { concurrency = { - update_merkle_tree = concurrency_update_merkle_tree, + update_merkle_tree = 0, }, } local new_machine @@ -250,7 +250,7 @@ do_test("dumped file hashes should match memory data hashes", function(machine) local data_region_start = tonumber(temp[1], 16) local data_region_size = tonumber(temp[2], 16) - local dump = assert(io.open(test_path .. file_name, "rb")) + local dump = assert(io.open(working_dir .. 
file_name, "rb")) local dump_hash = md5.sumhexa(dump:read("*all")) dump:close() @@ -268,7 +268,7 @@ do_test("machine initial hash should match", function(machine) local root_hash = machine:get_root_hash() machine:dump_pmas() - local calculated_root_hash = test_util.calculate_emulator_hash(test_path, pmas_file_names, machine) + local calculated_root_hash = test_util.calculate_emulator_hash(working_dir, pmas_file_names, machine) print("Root hash:", test_util.tohex(root_hash), " calculated root hash:", test_util.tohex(calculated_root_hash)) @@ -286,7 +286,7 @@ test_util.make_do_test(build_uarch_machine, machine_type)( print("Root hash:", test_util.tohex(root_hash)) machine:dump_pmas() - local calculated_root_hash = test_util.calculate_emulator_hash(test_path, pmas_file_names_with_uarch, machine) + local calculated_root_hash = test_util.calculate_emulator_hash(working_dir, pmas_file_names_with_uarch, machine) remove_files(pmas_file_names) assert(test_util.tohex(root_hash) == test_util.tohex(calculated_root_hash), "Initial root hash does not match") @@ -299,7 +299,7 @@ test_util.make_do_test(build_uarch_machine, machine_type)( machine:dump_pmas() local calculated_root_hash_step1 = - test_util.calculate_emulator_hash(test_path, pmas_file_names_with_uarch, machine) + test_util.calculate_emulator_hash(working_dir, pmas_file_names_with_uarch, machine) -- Remove dumped pmas files remove_files(pmas_file_names) @@ -321,15 +321,15 @@ test_util.make_do_test(build_uarch_machine, machine_type)("proof check should pa -- hashes match machine:dump_pmas() local ram_file_name = pmas_file_names[5] - local ram = test_util.parse_pma_file(test_path .. ram_file_name) + local ram = test_util.load_file(working_dir .. ram_file_name) remove_files(pmas_file_names) local ram_address_start = tonumber(test_util.split_string(ram_file_name, "--.")[1], 16) - local ram_data_number_of_pages = math.ceil(#ram / (1 << 12)) - local ram_log2_data_size = math.ceil(math.log(#ram, 2)) - local calculated_ram_hash = test_util.calculate_region_hash(ram, ram_data_number_of_pages, 12, ram_log2_data_size) - local ram_proof = machine:get_proof(ram_address_start, ram_log2_data_size) + local ram_log2_size = math.ceil(math.log(#ram, 2)) + local calculated_ram_hash = test_util.merkle_hash(ram, 0, ram_log2_size) + + local ram_proof = machine:get_proof(ram_address_start, ram_log2_size) local root_hash = machine:get_root_hash() assert(test_util.tohex(root_hash) == test_util.tohex(ram_proof.root_hash), "root hash in proof does not match") @@ -359,7 +359,7 @@ do_test("mcycle and root hash should match", function(machine) local root_hash = machine:get_root_hash() machine:dump_pmas() - local calculated_root_hash_1000 = test_util.calculate_emulator_hash(test_path, pmas_file_names, machine) + local calculated_root_hash_1000 = test_util.calculate_emulator_hash(working_dir, pmas_file_names, machine) -- Remove dumped pmas files remove_files(pmas_file_names) @@ -390,7 +390,7 @@ do_test("mcycle and root hash should match", function(machine) print("End hash: ", test_util.tohex(root_hash)) machine:dump_pmas() - local calculated_end_hash = test_util.calculate_emulator_hash(test_path, pmas_file_names, machine) + local calculated_end_hash = test_util.calculate_emulator_hash(working_dir, pmas_file_names, machine) -- Remove dumped pmas files remove_files(pmas_file_names) @@ -408,7 +408,7 @@ do_test("proof and root hash should match", function(machine) local initial_ram_proof = machine:get_proof(ram_address_start, 10) -- Calculate hash local initial_memory_read = 
machine:read_memory(ram_address_start, 2 ^ 10) - local initial_calculated_hash = test_util.calculate_root_hash(initial_memory_read, 10) + local initial_calculated_hash = test_util.merkle_hash(initial_memory_read, 0, 10) assert( test_util.tohex(initial_ram_proof.target_hash) == test_util.tohex(initial_calculated_hash), "initial hash does not match" @@ -430,7 +430,7 @@ do_test("proof and root hash should match", function(machine) local ram_proof = machine:get_proof(ram_address_start, 10) -- Calculate hash local memory_read = machine:read_memory(ram_address_start, 2 ^ 10) - local calculated_hash = test_util.calculate_root_hash(memory_read, 10) + local calculated_hash = test_util.merkle_hash(memory_read, 0, 10) print( "end target hash:", @@ -468,8 +468,8 @@ test_util.make_do_test(build_machine, machine_type, { flash_drive = { { start = 0x80000000000000, + length = 0x100000, shared = false, - image_filename = rootfs_image, }, }, })("should replace flash drive and read something", function(machine) @@ -502,42 +502,6 @@ test_util.make_do_test(build_machine, machine_type, { os.remove(input_path) end) -print("\n\n check reading from an input and writing to an output flash drive") -test_util.make_do_test(build_machine, machine_type, { - processor = {}, - ram = { - image_filename = linux_image, - length = 0x4000000, - }, - dtb = { - bootargs = "console=hvc0 rootfstype=ext2 root=/dev/mtdblock0 rw quiet swiotlb=noforce single=yes splash=no " - .. "init=/opt/cartesi/bin/init " - .. "mtdparts=flash.0:-(root);flash.1:-(input);flash.2:-(output) -- " - .. "cat /mnt/input/etc/issue | dd status=none of=/dev/mtdblock2", - }, - flash_drive = { - { - start = 0x80000000000000, - image_filename = rootfs_image, - }, - { - start = 0x90000000000000, - image_filename = rootfs_image, - }, - { - start = 0xa0000000000000, - length = 4096, - }, - }, -})("should boot mount input flash drive and output to another flash drive", function(machine) - machine:run(MAX_MCYCLE) - assert(machine:read_iflags_H(), "machine should be halted") - - local expected_data = "cartesi" - local flash_data = machine:read_memory(0xa0000000000000, #expected_data) - assert(flash_data == expected_data, "unexpected flash drive output") -end) - print("\n\n check for relevant register values after step 1") test_util.make_do_test(build_uarch_machine, machine_type)("register values should match", function(machine) local uarch_pc_before = machine:read_uarch_pc() diff --git a/src/tests/util.lua b/src/tests/util.lua index 0677adfec..a8f54e863 100644 --- a/src/tests/util.lua +++ b/src/tests/util.lua @@ -18,90 +18,30 @@ local cartesi = require("cartesi") -local zero_keccak_hash_table = { - "", - "", - "011b4d03dd8c01f1049143cf9c4c817e4b167f1d1b83e5c6f0f10d89ba1e7bce", -- 3 - "4d9470a821fbe90117ec357e30bad9305732fb19ddf54a07dd3e29f440619254", -- 4 - "ae39ce8537aca75e2eff3e38c98011dfe934e700a0967732fc07b430dd656a23", -- 5 - "3fc9a15f5b4869c872f81087bb6104b7d63e6f9ab47f2c43f3535eae7172aa7f", -- 6 - "17d2dd614cddaa4d879276b11e0672c9560033d3e8453a1d045339d34ba601b9", -- 7 - "c37b8b13ca95166fb7af16988a70fcc90f38bf9126fd833da710a47fb37a55e6", -- 8 - "8e7a427fa943d9966b389f4f257173676090c6e95f43e2cb6d65f8758111e309", -- 9 - "30b0b9deb73e155c59740bacf14a6ff04b64bb8e201a506409c3fe381ca4ea90", -- 10 - "cd5deac729d0fdaccc441d09d7325f41586ba13c801b7eccae0f95d8f3933efe", -- 11 - "d8b96e5b7f6f459e9cb6a2f41bf276c7b85c10cd4662c04cbbb365434726c0a0", -- 12 - "c9695393027fb106a8153109ac516288a88b28a93817899460d6310b71cf1e61", -- 13 - 
"63e8806fa0d4b197a259e8c3ac28864268159d0ac85f8581ca28fa7d2c0c03eb", -- 14 - "91e3eee5ca7a3da2b3053c9770db73599fb149f620e3facef95e947c0ee860b7", -- 15 - "2122e31e4bbd2b7c783d79cc30f60c6238651da7f0726f767d22747264fdb046", -- 16 - "f7549f26cc70ed5e18baeb6c81bb0625cb95bb4019aeecd40774ee87ae29ec51", -- 17 - "7a71f6ee264c5d761379b3d7d617ca83677374b49d10aec50505ac087408ca89", -- 18 - "2b573c267a712a52e1d06421fe276a03efb1889f337201110fdc32a81f8e1524", -- 19 - "99af665835aabfdc6740c7e2c3791a31c3cdc9f5ab962f681b12fc092816a62f", -- 20 - "27d86025599a41233848702f0cfc0437b445682df51147a632a0a083d2d38b5e", -- 21 - "13e466a8935afff58bb533b3ef5d27fba63ee6b0fd9e67ff20af9d50deee3f8b", -- 22 - "f065ec220c1fd4ba57e341261d55997f85d66d32152526736872693d2b437a23", -- 23 - "3e2337b715f6ac9a6a272622fdc2d67fcfe1da3459f8dab4ed7e40a657a54c36", -- 24 - "766c5e8ac9a88b35b05c34747e6507f6b044ab66180dc76ac1a696de03189593", -- 25 - "fedc0d0dbbd855c8ead673544899b0960e4a5a7ca43b4ef90afe607de7698cae", -- 26 - "fdc242788f654b57a4fb32a71b335ef6ff9a4cc118b282b53bdd6d6192b7a82c", -- 27 - "3c5126b9c7e33c8e5a5ac9738b8bd31247fb7402054f97b573e8abb9faad219f", -- 28 - "4fd085aceaa7f542d787ee4196d365f3cc566e7bbcfbfd451230c48d804c017d", -- 29 - "21e2d8fa914e2559bb72bf0ab78c8ab92f00ef0d0d576eccdd486b64138a4172", -- 30 - "674857e543d1d5b639058dd908186597e366ad5f3d9c7ceaff44d04d1550b8d3", -- 31 - "3abc751df07437834ba5acb32328a396994aebb3c40f759c2d6d7a3cb5377e55", -- 32 - "d5d218ef5a296dda8ddc355f3f50c3d0b660a51dfa4d98a6a5a33564556cf83c", -- 33 - "1373a814641d6a1dcef97b883fee61bb84fe60a3409340217e629cc7e4dcc93b", -- 34 - "85d8820921ff5826148b60e6939acd7838e1d7f20562bff8ee4b5ec4a05ad997", -- 35 - "a57b9796fdcb2eda87883c2640b072b140b946bfdf6575cacc066fdae04f6951", -- 36 - "e63624cbd316a677cad529bbe4e97b9144e4bc06c4afd1de55dd3e1175f90423", -- 37 - "847a230d34dfb71ed56f2965a7f6c72e6aa33c24c303fd67745d632656c5ef90", -- 38 - "bec80f4f5d1daa251988826cef375c81c36bf457e09687056f924677cb0bccf9", -- 39 - "8dff81e014ce25f2d132497923e267363963cdf4302c5049d63131dc03fd95f6", -- 40 - "5d8b6aa5934f817252c028c90f56d413b9d5d10d89790707dae2fabb249f6499", -- 41 - "29927c21dd71e3f656826de5451c5da375aadecbd59d5ebf3a31fae65ac1b316", -- 42 - "a1611f1b276b26530f58d7247df459ce1f86db1d734f6f811932f042cee45d0e", -- 43 - "455306d01081bc3384f82c5fb2aacaa19d89cdfa46cc916eac61121475ba2e61", -- 44 - "91b4feecbe1789717021a158ace5d06744b40f551076b67cd63af60007f8c998", -- 45 - "76e1424883a45ec49d497ddaf808a5521ca74a999ab0b3c7aa9c80f85e93977e", -- 46 - "c61ce68b20307a1a81f71ca645b568fcd319ccbb5f651e87b707d37c39e15f94", -- 47 - "5ea69e2f7c7d2ccc85b7e654c07e96f0636ae4044fe0e38590b431795ad0f864", -- 48 - "7bdd613713ada493cc17efd313206380e6a685b8198475bbd021c6e9d94daab2", -- 49 - "214947127506073e44d5408ba166c512a0b86805d07f5a44d3c41706be2bc15e", -- 50 - "712e55805248b92e8677d90f6d284d1d6ffaff2c430657042a0e82624fa3717b", -- 51 - "06cc0a6fd12230ea586dae83019fb9e06034ed2803c98d554b93c9a52348caff", -- 52 - "f75c40174a91f9ae6b8647854a156029f0b88b83316663ce574a4978277bb6bb", -- 53 - "27a31085634b6ec78864b6d8201c7e93903d75815067e378289a3d072ae172da", -- 54 - "fa6a452470f8d645bebfad9779594fc0784bb764a22e3a8181d93db7bf97893c", -- 55 - "414217a618ccb14caa9e92e8c61673afc9583662e812adba1f87a9c68202d60e", -- 56 - "909efab43c42c0cb00695fc7f1ffe67c75ca894c3c51e1e5e731360199e600f6", -- 57 - "ced9a87b2a6a87e70bf251bb5075ab222138288164b2eda727515ea7de12e249", -- 58 - "6d4fe42ea8d1a120c03cf9c50622c2afe4acb0dad98fd62d07ab4e828a94495f", -- 59 - 
"6d1ab973982c7ccbe6c1fae02788e4422ae22282fa49cbdb04ba54a7a238c6fc", -- 60 - "41187451383460762c06d1c8a72b9cd718866ad4b689e10c9a8c38fe5ef045bd", -- 61 - "785b01e980fc82c7e3532ce81876b778dd9f1ceeba4478e86411fb6fdd790683", -- 62 - "916ca832592485093644e8760cd7b4c01dba1ccc82b661bf13f0e3f34acd6b88", -- 63 -} - local function adjust_images_path(path) if not path then return "" end return string.gsub(path, "/*$", "") .. "/" end local test_util = { - incremental_merkle_tree_of_pages = { - m_context = {}, - m_page_log2_size = 0, - m_tree_log2_size = 0, - m_page_count = 0, - m_max_pages = 0, - }, hash = { LOG2_WORD_SIZE = 3 }, images_path = adjust_images_path(os.getenv("CARTESI_IMAGES_PATH")), tests_path = adjust_images_path(os.getenv("CARTESI_TESTS_PATH")), } +local zero_keccak_hash_table = { + "", + "", +} + +do + local hash = cartesi.keccak(string.rep("\0", 8)) + for i = 3, 63 do + zero_keccak_hash_table[i] = hash + hash = cartesi.keccak(hash, hash) + end +end + test_util.uarch_programs = { halt = { 0x32800293, -- li t0, UARCH_HALT_FLAG_SHADDOW_ADDR_DEF (0x328) @@ -137,24 +77,14 @@ end function test_util.disabled_test(description) print("Disabled test - " .. description) end -function test_util.incremental_merkle_tree_of_pages:new(o, page_log2_size, tree_log2_size) - o = o or {} - setmetatable(o, self) - self.__index = self - self.m_context = {} - self.m_page_log2_size = page_log2_size - self.m_tree_log2_size = tree_log2_size - self.m_page_count = 0 - self.m_max_pages = 0x01 << (tree_log2_size - page_log2_size) - return o -end +local back_merkle_tree_meta = { __index = {} } -function test_util.incremental_merkle_tree_of_pages:add_page(new_page_hash) - local right = new_page_hash - assert(self.m_page_count < self.m_max_pages, "Page count must be smaller than max pages") - local depth = self.m_tree_log2_size - self.m_page_log2_size +function back_merkle_tree_meta.__index:push_back(new_leaf_hash) + local right = new_leaf_hash + assert(self.m_leaf_count < self.m_max_leaves, "too many leaves") + local depth = self.m_log2_root_size - self.m_log2_leaf_size for i = 0, depth do - if self.m_page_count & (0x01 << i) ~= 0x0 then + if self.m_leaf_count & (0x01 << i) ~= 0x0 then local left = self.m_context[i] right = cartesi.keccak(left, right) else @@ -162,20 +92,59 @@ function test_util.incremental_merkle_tree_of_pages:add_page(new_page_hash) break end end - self.m_page_count = self.m_page_count + 1 + self.m_leaf_count = self.m_leaf_count + 1 +end + +function back_merkle_tree_meta.__index:pad_back(new_leaf_count) + assert( + new_leaf_count <= self.m_max_leaves and self.m_leaf_count + new_leaf_count <= self.m_max_leaves, + "too many leaves" + ) + local depth = self.m_log2_root_size - self.m_log2_leaf_size + -- pad with progressively larger trees until our smallest tree has more leaves than the leaf count left + for j = 0, depth do + local j_span = 0x1 << j + if j_span > new_leaf_count then break end + -- is our smallest tree at depth j? 
+ if (self.m_leaf_count & j_span) ~= 0x0 then + -- if so, we can add 2^j pristine leaves directly + local right = zero_keccak_hash_table[self.m_log2_leaf_size + j] + for i = j, depth do + local i_span = 0x1 << i + if (self.m_leaf_count & i_span) ~= 0x0 then + local left = self.m_context[i] + right = cartesi.keccak(left, right) + else + self.m_context[i] = right + break + end + end + new_leaf_count = new_leaf_count - j_span + self.m_leaf_count = self.m_leaf_count + j_span + end + end + -- now add the rest of the padding directly to the context + for i = 0, depth do + local i_span = 0x1 << i + if (new_leaf_count & i_span) ~= 0x0 then + self.m_context[i] = zero_keccak_hash_table[self.m_log2_leaf_size + i] + new_leaf_count = new_leaf_count - i_span + self.m_leaf_count = self.m_leaf_count + i_span + end + end end -function test_util.incremental_merkle_tree_of_pages:get_root_hash() - assert(self.m_page_count <= self.m_max_pages, "Page count must be smaller or equal than max pages") - local depth = self.m_tree_log2_size - self.m_page_log2_size - if self.m_page_count < self.m_max_pages then - local root = test_util.fromhex(zero_keccak_hash_table[self.m_page_log2_size]) +function back_merkle_tree_meta.__index:get_root_hash() + assert(self.m_leaf_count <= self.m_max_leaves, "too many leaves") + local depth = self.m_log2_root_size - self.m_log2_leaf_size + if self.m_leaf_count < self.m_max_leaves then + local root = zero_keccak_hash_table[self.m_log2_leaf_size] for i = 0, depth - 1 do - if (self.m_page_count & (0x01 << i)) ~= 0 then + if (self.m_leaf_count & (0x01 << i)) ~= 0 then local left = self.m_context[i] root = cartesi.keccak(left, root) else - local right = test_util.fromhex(zero_keccak_hash_table[self.m_page_log2_size + i]) + local right = zero_keccak_hash_table[self.m_log2_leaf_size + i] root = cartesi.keccak(root, right) end end @@ -185,6 +154,16 @@ function test_util.incremental_merkle_tree_of_pages:get_root_hash() end end +function test_util.new_back_merkle_tree(log2_root_size, log2_leaf_size) + local self = {} + self.m_context = {} + self.m_log2_leaf_size = log2_leaf_size + self.m_log2_root_size = log2_root_size + self.m_leaf_count = 0 + self.m_max_leaves = 0x01 << (log2_root_size - log2_leaf_size) + return setmetatable(self, back_merkle_tree_meta) +end + function test_util.file_exists(name) local f = io.open(name, "r") if f ~= nil then @@ -195,14 +174,14 @@ function test_util.file_exists(name) end end -function test_util.fromhex(str) - return (str:gsub("..", function(cc) return string.char(tonumber(cc, 16)) end)) -end - function test_util.tohex(str) return (str:gsub(".", function(c) return string.format("%02X", string.byte(c)) end)) end +function test_util.fromhex(str) + return (str:gsub("..", function(cc) return string.char(tonumber(cc, 16)) end)) +end + function test_util.split_string(inputstr, sep) if sep == nil then sep = "%s" end local t = {} @@ -229,93 +208,7 @@ end function test_util.align(v, el) return (v >> el << el) end --- Calculate root hash for data buffer of log2_size -function test_util.calculate_root_hash(data, log2_size) - if log2_size < test_util.hash.LOG2_WORD_SIZE then - error("Wrong data size", 2) - elseif log2_size > test_util.hash.LOG2_WORD_SIZE then - log2_size = log2_size - 1 - local sz = math.ceil(data:len() / 2) - local child1 = test_util.calculate_root_hash(data:sub(1, sz), log2_size) - local child2 = test_util.calculate_root_hash(data:sub(sz + 1, data:len()), log2_size) - local hash = cartesi.keccak(child1, child2) - return hash - else - local hash = 
cartesi.keccak(data) - return hash - end -end - --- Taking memory region in buffer data_buffer, and occuping data_number_of_pages --- of page size page_log2_size --- calculate merke hash for region of up to tree_log2_size, --- using zero sibling hashes where needed -function test_util.calculate_region_hash(data_buffer, data_number_of_pages, page_log2_size, tree_log2_size) - local page_size = 1 << page_log2_size - - local incremental_tree = test_util.incremental_merkle_tree_of_pages:new({}, page_log2_size, tree_log2_size) - - for i = 0, data_number_of_pages - 1 do - local current_page_data = data_buffer:sub(i * page_size + 1, (i + 1) * page_size) - local current_page_hash = test_util.calculate_root_hash(current_page_data, page_log2_size) - incremental_tree:add_page(current_page_hash) - end - - local root_hash = incremental_tree:get_root_hash() - - return root_hash -end - --- Take data hash of some region and extend it with pristine space --- up to tree_log2_size, calculating target hash -function test_util.extend_region_hash(data_hash, data_address, data_log2_size, tree_log2_size) - local result_hash = data_hash - local result_address = data_address - for n = data_log2_size + 1, tree_log2_size do - if result_address & ((1 << n) - 1) == 0 then - local child1 = result_hash - local child2 = test_util.fromhex(zero_keccak_hash_table[n - 1]) - result_hash = cartesi.keccak(child1, child2) - else - local child1 = test_util.fromhex(zero_keccak_hash_table[n - 1]) - local child2 = result_hash - result_hash = cartesi.keccak(child1, child2) - result_address = result_address & (~0x01 << (n - 1)) - -- print("calculated level: ",n," value: ", test_util.tohex(result_hash)) - end - end - - return result_hash -end - --- Taking memory region with starting data_address and log2_data_size --- calculate merke hash for region of up to log2_result_address_space, --- using zero sibling hashes where needed. 
-- Taking memory region with starting data_address and log2_data_size
--- calculate merke hash for region of up to log2_result_address_space,
--- using zero sibling hashes where needed. Data_address may not be aligned
--- to the beginning of the log2_result_address_space
-function test_util.calculate_region_hash_2(data_address, data_buffer, log2_data_size, log2_result_address_space)
-    data_address = data_address & (~0x01 << (log2_data_size - 1))
-
-    local data_hash = test_util.calculate_root_hash(data_buffer, log2_data_size)
-
-    local result_hash = data_hash
-    local result_address = data_address
-    for n = log2_data_size + 1, log2_result_address_space do
-        if result_address & ((1 << n) - 1) == 0 then
-            local child1 = result_hash
-            local child2 = test_util.fromhex(zero_keccak_hash_table[n - 1])
-            result_hash = cartesi.keccak(child1, child2)
-        else
-            local child1 = test_util.fromhex(zero_keccak_hash_table[n - 1])
-            local child2 = result_hash
-            result_hash = cartesi.keccak(child1, child2)
-            result_address = result_address & (~0x01 << (n - 1))
-        end
-    end
-
-    return result_hash
-end
-
-function test_util.parse_pma_file(filename)
+function test_util.load_file(filename)
     local fd = assert(io.open(filename, "rb"))
     local data_size = fd:seek("end")
     fd:seek("set")
@@ -324,94 +217,48 @@
     local data = fd:read(data_size)
     fd:close()
     return data
 end
 
--- PMA defs
-local PMA_SHADOW_STATE_LENGTH = 0x1000
-local PMA_SHADOW_PMAS_LENGTH = 0x1000
-local PMA_DTB_LENGTH = 0xF000
-local PMA_SHADOW_TLB_START = 0x20000
-local PMA_SHADOW_TLB_LENGTH = 0x6000
-local PMA_CLINT_START = 0x2000000
-local PMA_CLINT_LENGTH = 0xC0000
-local PMA_HTIF_START = 0x40008000
-local PMA_UARCH_RAM_START = 0x70000000
-local PMA_RAM_START = 0x80000000
-local PMA_PAGE_SIZE_LOG2 = 12
-local PMA_PAGE_SIZE = 1 << PMA_PAGE_SIZE_LOG2
+local PAGE_LOG2_SIZE = 12
+local PAGE_SIZE = 1 << PAGE_LOG2_SIZE
+local WORD_LOG2_SIZE = 3
 
-local function ceil_log2(x)
-    assert(x > 0, "shouldn't try to compute log of x <= 0")
-    return math.ceil(math.log(x, 2)) // 1
+local function merkle_hash(data, start, log2_size)
+    if log2_size > WORD_LOG2_SIZE then
+        local child_log2_size = log2_size - 1
+        local left = merkle_hash(data, start, child_log2_size)
+        local right = merkle_hash(data, start + (1 << child_log2_size), child_log2_size)
+        return cartesi.keccak(left, right)
+    else
+        return cartesi.keccak(data:sub(start + 1, start + (1 << WORD_LOG2_SIZE)))
+    end
 end
 
-local extend_region_hash = test_util.extend_region_hash
-local calculate_region_hash = test_util.calculate_region_hash
-local calculate_region_hash_2 = test_util.calculate_region_hash_2
+test_util.merkle_hash = merkle_hash
 
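merkle_hash recursively splits the buffer down to 2^WORD_LOG2_SIZE-byte words and keccaks the pair of child hashes back up, so the page hashes it produces feed directly into the tree above. A quick sanity check of the recursion, with illustrative values that are not part of the diff:

    local data = string.rep("\0", 16)                -- one 2^4-byte buffer of zeros
    local word = cartesi.keccak(string.rep("\0", 8)) -- hash of one 2^3-byte word
    assert(test_util.merkle_hash(data, 0, 4) == cartesi.keccak(word, word))
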
 -- Take data from dumped memory files
 -- and calculate root hash of the machine
 function test_util.calculate_emulator_hash(test_path, pmas_files)
-    local shadow_state = test_util.parse_pma_file(test_path .. pmas_files[1])
-    local dtb = test_util.parse_pma_file(test_path .. pmas_files[2])
-    local shadow_pmas = test_util.parse_pma_file(test_path .. pmas_files[3])
-    local shadow_tlb = test_util.parse_pma_file(test_path .. pmas_files[4])
-    local clint = test_util.parse_pma_file(test_path .. pmas_files[5])
-    local htif = test_util.parse_pma_file(test_path .. pmas_files[6])
-    local ram = test_util.parse_pma_file(test_path .. pmas_files[7])
-    local uarch_ram = ""
-    if pmas_files[8] then uarch_ram = test_util.parse_pma_file(test_path .. pmas_files[8]) end
-
-    local shadow_dtb = shadow_state .. dtb .. shadow_pmas
-
-    local shadow_dtb_hash_size_log2 = ceil_log2(PMA_SHADOW_STATE_LENGTH + PMA_DTB_LENGTH + PMA_SHADOW_PMAS_LENGTH)
-    local shadow_dtb_space_hash = calculate_region_hash(
-        shadow_dtb,
-        (#shadow_dtb + PMA_PAGE_SIZE - 1) // PMA_PAGE_SIZE,
-        PMA_PAGE_SIZE_LOG2,
-        shadow_dtb_hash_size_log2
-    )
-    shadow_dtb_space_hash = extend_region_hash(shadow_dtb_space_hash, 0, shadow_dtb_hash_size_log2, 17)
-
-    local tlb_size_log2 = ceil_log2(PMA_SHADOW_TLB_LENGTH)
-    local tlb_space_hash = calculate_region_hash(
-        shadow_tlb,
-        (#shadow_tlb + PMA_PAGE_SIZE - 1) // PMA_PAGE_SIZE,
-        PMA_PAGE_SIZE_LOG2,
-        tlb_size_log2
-    )
-    tlb_space_hash = extend_region_hash(tlb_space_hash, PMA_SHADOW_TLB_START, tlb_size_log2, 17)
-
-    local shadow_dtb_tlb_space_hash = cartesi.keccak(shadow_dtb_space_hash, tlb_space_hash) -- 18
-    shadow_dtb_tlb_space_hash = extend_region_hash(shadow_dtb_tlb_space_hash, 0, 18, 25)
-
-    local clint_size_log2 = ceil_log2(PMA_CLINT_LENGTH)
-    local clint_space_hash =
-        calculate_region_hash(clint, (#clint + PMA_PAGE_SIZE - 1) // PMA_PAGE_SIZE, PMA_PAGE_SIZE_LOG2, clint_size_log2)
-    clint_space_hash = extend_region_hash(clint_space_hash, PMA_CLINT_START, clint_size_log2, 25)
-
-    local shadow_dtb_tlb_clint_hash = cartesi.keccak(shadow_dtb_tlb_space_hash, clint_space_hash) -- 26
-    shadow_dtb_tlb_clint_hash = extend_region_hash(shadow_dtb_tlb_clint_hash, 0, 26, 29)
-
-    local htif_size_log2 = ceil_log2(#htif)
-    local htif_space_hash = calculate_region_hash_2(PMA_HTIF_START, htif, htif_size_log2, 29)
-    local left = cartesi.keccak(shadow_dtb_tlb_clint_hash, htif_space_hash) -- 30
-    local uarch_ram_space_hash = test_util.fromhex(zero_keccak_hash_table[30])
-    if #uarch_ram > 0 then
-        local uarch_ram_size_log2 = ceil_log2(#uarch_ram)
-        uarch_ram_space_hash = calculate_region_hash(
-            uarch_ram,
-            (#uarch_ram + PMA_PAGE_SIZE - 1) // PMA_PAGE_SIZE,
-            PMA_PAGE_SIZE_LOG2,
-            uarch_ram_size_log2
-        )
-        uarch_ram_space_hash = extend_region_hash(uarch_ram_space_hash, PMA_UARCH_RAM_START, uarch_ram_size_log2, 30)
+    local pmas = {}
+    for _, pma_file in ipairs(pmas_files) do
+        local start, length = string.match(pma_file, "^(%x+)%-%-(%x+)%.bin$")
+        pmas[#pmas + 1] = {
+            path = pma_file,
+            start = assert(tonumber(start, 16), "invalid PMA start in dumped filename"),
+            length = assert(tonumber(length, 16), "invalid PMA length in dumped filename"),
+            data = test_util.load_file(test_path .. pma_file),
+        }
     end
-    left = cartesi.keccak(left, uarch_ram_space_hash) -- 31
-
-    local ram_size_log2 = ceil_log2(#ram)
-    local ram_space_hash = calculate_region_hash_2(PMA_RAM_START, ram, ram_size_log2, 31)
-    local used_space_hash = cartesi.keccak(left, ram_space_hash) -- 32
-
-    return test_util.extend_region_hash(used_space_hash, 0, 32, 64)
+    table.sort(pmas, function(a, b) return a.start < b.start end)
+    local tree = test_util.new_back_merkle_tree(64, PAGE_LOG2_SIZE)
+    local last = 0
+    for _, v in ipairs(pmas) do
+        tree:pad_back((v.start - last) >> PAGE_LOG2_SIZE)
+        for j = 0, v.length - 1, PAGE_SIZE do
+            local page_hash = merkle_hash(v.data, j, PAGE_LOG2_SIZE)
+            tree:push_back(page_hash)
+        end
+        last = v.start + v.length
+    end
+    return tree:get_root_hash()
 end
 
 return test_util
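With the refactor, computing a machine root hash from dumped PMA files reduces to listing the files: each start--length.bin name locates its region, and the gaps between consecutive regions are padded with pristine pages by pad_back, so the hard-coded PMA layout constants are no longer needed. Note that the code assumes starts and lengths are multiples of the 2^12-byte page size. An illustrative call, with made-up paths and file names:

    local root_hash = test_util.calculate_emulator_hash("/tmp/dump/", {
        "0000000000000000--0000000000001000.bin", -- e.g., shadow state
        "0000000080000000--0000000000100000.bin", -- e.g., RAM
    })
    print(test_util.tohex(root_hash))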