From 2c95254d1bf07aa04700a38694990485422796cc Mon Sep 17 00:00:00 2001 From: ex0dus-0x Date: Fri, 27 Sep 2019 21:07:21 -0400 Subject: [PATCH 1/2] Add DeepState test integration --- setup-tools/CMakeLists.txt | 71 +- setup-tools/test/CMakeLists.txt | 3 +- .../test/deepstate-tests/CMakeLists.txt | 30 + .../deepstate-tests/test_aztec_common.cpp | 622 ++++++++++++++++++ .../test_compute_range_polynomial.cpp | 140 ++++ .../deepstate-tests/test_entry_points.cpp | 247 +++++++ .../test/deepstate-tests/test_setup.cpp | 356 ++++++++++ .../test/deepstate-tests/test_utils.hpp | 102 +++ 8 files changed, 1568 insertions(+), 3 deletions(-) create mode 100644 setup-tools/test/deepstate-tests/CMakeLists.txt create mode 100644 setup-tools/test/deepstate-tests/test_aztec_common.cpp create mode 100644 setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp create mode 100644 setup-tools/test/deepstate-tests/test_entry_points.cpp create mode 100644 setup-tools/test/deepstate-tests/test_setup.cpp create mode 100644 setup-tools/test/deepstate-tests/test_utils.hpp diff --git a/setup-tools/CMakeLists.txt b/setup-tools/CMakeLists.txt index 696810a..8e5c034 100644 --- a/setup-tools/CMakeLists.txt +++ b/setup-tools/CMakeLists.txt @@ -18,17 +18,58 @@ project(aztec-trusted-setup) cable_configure_compiler(NO_PEDANTIC NO_CONVERSION_WARNINGS) set(include_dir ${CMAKE_CURRENT_SOURCE_DIR}/include) +set(setup_test_dir ${CMAKE_CURRENT_SOURCE_DIR}/test/setup) +set(deepstate_test_dir ${CMAKE_CURRENT_SOURCE_DIR}/test/deepstate-tests) set(private_include_dir ${PROJECT_SOURCE_DIR}/src) set(DEPENDS_DIR ${PROJECT_SOURCE_DIR}/depends) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-parameter") +set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE ON) + +option( + GENCOV + "Enable gcov instrumentation" + OFF +) +if("${GENCOV}") + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -fprofile-arcs -ftest-coverage") +endif() + +message(STATUS "GENCOV: ${GENCOV}") ### SETUP # Create a directory to store the trusted setup output add_custom_target(create-setup-db-directory ALL COMMAND ${CMAKE_COMMAND} -E make_directory ${PROJECT_SOURCE_DIR}/setup_db) +# regular gcov reporting with setup_tests +add_custom_target(cov + COMMAND ${CMAKE_COMMAND} -E make_directory cov_out + COMMAND ${CMAKE_CTEST_COMMAND} -R setup + WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) +add_custom_command(TARGET cov + COMMAND echo "=================== GCOV ====================" + COMMAND echo "${setup_test_dir}" + COMMAND gcov -b ${setup_test_dir}/*.cpp -o ${CMAKE_BINARY_DIR}/test/setup/CMakeFiles/setup_tests.dir/ + COMMAND echo "-- Coverage files have been output to ${CMAKE_BINARY_DIR}/cov_out" + COMMAND echo "=================== GCOV ====================" + WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/cov_out) + +# gcov reporting for deepstate_tests +add_custom_target(dcov + COMMAND ${CMAKE_COMMAND} -E make_directory dcov_out + COMMAND ${CMAKE_CTEST_COMMAND} -R deepstate_test + WORKING_DIRECTORY ${CMAKE_BINARY_DIR}) +add_custom_command(TARGET dcov + COMMAND echo "=================== GCOV ====================" + COMMAND echo "${deepstate_test_dir}" + COMMAND gcov -b ${deepstate_test_dir}/*.cpp -o ${CMAKE_BINARY_DIR}/test/deepstate-tests/CMakeFiles/deepstate_tests.dir/ + COMMAND echo "-- Coverage files have been output to ${CMAKE_BINARY_DIR}/dcov_out" + COMMAND echo "=================== GCOV ====================" + WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/dcov_out) + + ### DEPENDENCIES # GMP @@ -78,6 +119,31 @@ add_definitions( -DNO_PROCPS ) +# DEFAULT SIMULATE_PARTICIPANT OFF +option( + 
SIMULATE_PARTICIPANT + "Setup toxic waste is hash of previous transcript" + OFF +) +if("${SIMULATE_PARTICIPANT}") + add_definitions(-DSIMULATE_PARTICIPANT) +endif() + +message(STATUS "SIMULATE_PARTICIPANT: ${SIMULATE_PARTICIPANT}") + + +# DEFAULT USE_CXX11_ABI OFF +option( + USE_CXX11_ABI + "Set to link to libraries that use legacy ABI (GCC 5.x)" + OFF +) +if("${USE_CXX11_ABI}") + add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) +endif() + +message(STATUS "USE_CXX11_ABI: ${USE_CXX11_ABI}") + add_subdirectory(depends) add_subdirectory(src) @@ -85,6 +151,7 @@ add_subdirectory(src) option(SETUP_TESTING "Build tests" ON) if(SETUP_TESTING) - enable_testing() - add_subdirectory(test) + enable_testing() + add_subdirectory(test) + add_test(deepstate_test ${CMAKE_CURRENT_BINARY_DIR}/test/deepstate_tests) endif() diff --git a/setup-tools/test/CMakeLists.txt b/setup-tools/test/CMakeLists.txt index 499cf5e..1239b1c 100644 --- a/setup-tools/test/CMakeLists.txt +++ b/setup-tools/test/CMakeLists.txt @@ -5,4 +5,5 @@ include(GoogleTest) set(setup_private_include_dir ${PROJECT_SOURCE_DIR}/src) -add_subdirectory(setup) \ No newline at end of file +add_subdirectory(deepstate-tests) +add_subdirectory(setup) diff --git a/setup-tools/test/deepstate-tests/CMakeLists.txt b/setup-tools/test/deepstate-tests/CMakeLists.txt new file mode 100644 index 0000000..43c4fd5 --- /dev/null +++ b/setup-tools/test/deepstate-tests/CMakeLists.txt @@ -0,0 +1,30 @@ +add_executable( + deepstate_tests + test_entry_points.cpp + test_aztec_common.cpp + test_setup.cpp + test_compute_range_polynomial.cpp + ../../src/verify/verifier.cpp +) + +target_link_libraries( + deepstate_tests + PRIVATE + ff + aztec_common + -ldeepstate_AFL + -pthread +) + +target_include_directories( + deepstate_tests + PRIVATE + ${PROJECT_SOURCE_DIR}/depends/libfqfft + ${private_include_dir} + ${include_dir} +) + +set_target_properties( + deepstate_tests + PROPERTIES RUNTIME_OUTPUT_DIRECTORY .. +) diff --git a/setup-tools/test/deepstate-tests/test_aztec_common.cpp b/setup-tools/test/deepstate-tests/test_aztec_common.cpp new file mode 100644 index 0000000..6c2e317 --- /dev/null +++ b/setup-tools/test/deepstate-tests/test_aztec_common.cpp @@ -0,0 +1,622 @@ +#include +#include +#include +#include +#include "test_utils.hpp" + +#include + +#include + +using namespace deepstate; + + +/* AztecCommon_BoringVariableSizeTests + * + * Concrete test for checking the size of various field elements. + */ +TEST(AztecCommon, BoringVariableSizeTests) +{ + libff::init_alt_bn128_params(); + size_t fq_bytes = sizeof(Fq); + size_t fr_bytes = sizeof(Fr); + size_t g1_bytes = sizeof(G1); + size_t g2_bytes = sizeof(G2); + + ASSERT_EQ(fq_bytes, 32); + ASSERT_EQ(fr_bytes, 32); + ASSERT_EQ(g1_bytes, 96); + ASSERT_EQ(g2_bytes, 192); +} + + +/* Streaming_BoringWriteBigIntToBuffer + * + * Concrete test that checks and verifies concrete + * bignum values and their resultant endianness. + */ +TEST(Streaming, BoringWriteBigIntToBuffer) +{ + libff::bigint<4> input; + + // generate bigints with concrete ulong vectors + input.data[3] = (mp_limb_t)0xffeeddccbbaa9988UL; + input.data[2] = (mp_limb_t)0x7766554433221100UL; + input.data[1] = (mp_limb_t)0xf0e1d2c3b4a59687UL; + input.data[0] = (mp_limb_t)0x78695a4b3c2d1e0fUL; + + // write bigints to buffer + char buffer[sizeof(mp_limb_t) * 4]; + streaming::write_bigint_to_buffer<4>(input, &buffer[0]); + + // cast buffer to libgmp bignum types. 
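+    // The assertions below pin down the expected layout: each 64-bit limb is written back
+    // byte-swapped in place, with the limb order preserved, so on a little-endian host
+    //   data[0] = 0x78695a4b3c2d1e0fUL  ->  buffer bytes 78 69 5a 4b 3c 2d 1e 0f
+    //                                   ->  read back as 0x0f1e2d3c4b5a6978UL.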
+ mp_limb_t expected[4]; + expected[0] = *(mp_limb_t *)(&buffer[0]); + expected[1] = *(mp_limb_t *)(&buffer[8]); + expected[2] = *(mp_limb_t *)(&buffer[16]); + expected[3] = *(mp_limb_t *)(&buffer[24]); + + // compare output with original inputs with flipped endianess + ASSERT_EQ(expected[3], (mp_limb_t)0x8899aabbccddeeffUL); + ASSERT_EQ(expected[2], (mp_limb_t)0x0011223344556677UL); + ASSERT_EQ(expected[1], (mp_limb_t)0x8796a5b4c3d2e1f0UL); + ASSERT_EQ(expected[0], (mp_limb_t)0x0f1e2d3c4b5a6978UL); +} + + +/* Streaming_WriteBigIntToBuffer + * + * Tests arbitrary input as bignum values and checks + * for resultant endianness when reconverted to a libgmp bignum. + */ +TEST(Streaming, WriteBigIntToBuffer) +{ + libff::bigint<1> input; + mp_limb_t expected[1]; + + // generate input value casted to unsigned long + unsigned long bigint_in = (unsigned long) DeepState_UInt64(); + input.data[0] = (mp_limb_t) bigint_in; + LOG(TRACE) << "Unsigned long input: " << bigint_in; + ASSERT_EQ(input.as_ulong(), bigint_in) + << input.as_ulong() << " does not equal input " << bigint_in; + + // write bigint to buffer, store in libgmp output + char buffer[sizeof(mp_limb_t)]; + streaming::write_bigint_to_buffer<1>(input, &buffer[0]); + expected[0] = *(mp_limb_t *)(&buffer[0]); + + // compare ulong input with libgmp output, with swapped endianess + ASSERT_EQ(input.as_ulong(), __builtin_bswap64(expected[0])) + << input.as_ulong() << " does not equal " << __builtin_bswap64(expected[0]); +} + + +/* Streaming_ReadG1ElemsToBuffer + * + * Tests reading arbitrary buffer input to G1 elements + * and then comparing to newly written output buffer. + */ +TEST(Streaming, ReadG1ElemsToBuffer) +{ + constexpr size_t N = 10; + constexpr size_t element_size = sizeof(Fq) * 2; + constexpr size_t buffer_size = N * element_size; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector elems; + result.reserve(N); + elems.reserve(N); + + // buffers for reading/writing G1 elements + char out_buffer[buffer_size]; + char * buffer = DeepState_CStrUpToLen(buffer_size); + LOG(TRACE) << "Input buffer :" << buffer << + " of size: " << buffer_size; + + // read from elements out of buffer + streaming::read_g1_elements_from_buffer(elems, buffer, element_size); + for (size_t i = 0; i < N; i++) + { + elems[i].to_affine_coordinates(); + result.emplace_back(elems[i]); + } + streaming::write_g1_elements_to_buffer(result, out_buffer); + + ASSERT(memcmp(buffer, out_buffer, buffer_size)) + << "Input buffer: " << buffer << " is not equal to output buffer: " << out_buffer; +} + + +/* Streaming_BoringReadG1ElemsFromBufferFile + * + * Concrete test vector for testing reading empty + * transcript file to G1 element, writing to output buffer + * and comparing +TEST(Streaming, DISABLED_BoringReadG1ElemsFromBufferFile) +{ + // transcript file to tests should actually exist + std::string transcript_path("./transcript/transcript00.dat"); + if (!streaming::is_file_exist(transcript_path)) { + LOG(ERROR) << "Transcript path: " << transcript_path << " should be \ + manually initialized with empty transcript file"; + } + + constexpr size_t buffer_size = sizeof(Fq) * 2; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector elems; + + char buffer[buffer_size]; + char out_buffer[buffer_size]; + + // read to std::vec, convert to char * buffer with std::copy + auto _buffer = streaming::read_file_into_buffer(transcript_path); + LOG(TRACE) << "Input buffer size: " << _buffer.size(); + LOG(TRACE) << "Expected buffer size: " << buffer_size; + 
+ std::copy(_buffer.begin(), _buffer.end(), buffer); + + // read contents from file to G1 element + streaming::read_g1_elements_from_buffer(elems, buffer, buffer_size); + elems[0].to_affine_coordinates(); + result.emplace_back(elems[0]); + + // write back to output buffer and compare + streaming::write_g1_elements_to_buffer(result, out_buffer); + ASSERT(memcmp(buffer, out_buffer, buffer_size)) + << "out_buffer contents: " << out_buffer; +} + */ + +/* Streaming_BoringWriteG1ElemsToBuffer + * + * Concrete test vector for writing random G1 elements to + * a buffer, reading and then validating them. + */ +TEST(Streaming, BoringWriteG1ElemsToBuffer) +{ + constexpr size_t N = 100; + constexpr size_t element_size = sizeof(Fq) * 2; + constexpr size_t buffer_size = N * element_size; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector expected; + result.reserve(N); + expected.reserve(N); + + char buffer[buffer_size]; + + for (size_t i = 0; i < N; ++i) + { + G1 point = G1::random_element(); + point.to_affine_coordinates(); + expected.emplace_back(point); + } + streaming::write_g1_elements_to_buffer(expected, buffer); + + streaming::read_g1_elements_from_buffer(result, buffer, buffer_size); + for (size_t i = 0; i < N; ++i) + { + test_utils::validate_g1_point(result[i], expected[i]); + } +} + + +/* Streaming_WriteG1ElemToBuffer + * + * Tests writing a single generated G1 element to a buffer, + * and then reading and validating it. + */ +TEST(Streaming, WriteG1ElemToBuffer) +{ + constexpr size_t N = 1; + constexpr size_t element_size = sizeof(Fq) * 2; + constexpr size_t buffer_size = N * element_size; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector expected; + result.reserve(N); + expected.reserve(N); + + char buffer[buffer_size]; + + // generate Jacobian coordinates for G1 element + G1 point = test_utils::DeepState_G1(); + point.to_affine_coordinates(); + expected.emplace_back(point); + + streaming::write_g1_elements_to_buffer(expected, buffer); + streaming::read_g1_elements_from_buffer(result, buffer, buffer_size); + + test_utils::validate_g1_point(result[0], expected[0]); +} + + +/* Streaming_BoringReadG2ElemsFromBufferFile + * + * Concrete test vector for testing reading empty + * transcript file to G2 element, writing to output buffer + * and comparing. 
+TEST(Streaming, DISABLED_BoringReadG2ElemsFromBufferFile) +{ + // transcript file to tests hould actually exist + std::string transcript_path("./transcript/transcript00.dat"); + if (!streaming::is_file_exist(transcript_path)) { + LOG(ERROR) << "Transcript path: " << transcript_path << " should be \ + manually initialized with empty transcript file"; + } + + constexpr size_t buffer_size = sizeof(Fqe) * 2; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector elems; + + char buffer[buffer_size]; + char out_buffer[buffer_size]; + + // read to std::vec, convert to char * buffer with std::copy + auto _buffer = streaming::read_file_into_buffer(transcript_path); + LOG(TRACE) << "Input buffer size: " << _buffer.size(); + LOG(TRACE) << "Expected buffer size: " << buffer_size; + + std::copy(_buffer.begin(), _buffer.end(), buffer); + + // read contents from file to G1 element + streaming::read_g2_elements_from_buffer(elems, buffer, buffer_size); + elems[0].to_affine_coordinates(); + result.emplace_back(elems[0]); + + // write back to output buffer and compare + streaming::write_g2_elements_to_buffer(result, out_buffer); + ASSERT(memcmp(buffer, out_buffer, buffer_size)) + << "out_buffer contents: " << out_buffer; +} +*/ + + +/* Streaming_ReadG2ElemsToBuffer + * + * Tests reading arbitrary buffer input to G2 elements + * and then comparing to newly written output buffer. + */ +TEST(Streaming, ReadG2ElemsToBuffer) +{ + constexpr size_t N = 10; + constexpr size_t element_size = sizeof(Fqe) * 2; + constexpr size_t buffer_size = N * element_size; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector elems; + result.reserve(N); + elems.reserve(N); + + // buffers for reading/writing G2 elements + char out_buffer[buffer_size]; + char * buffer = DeepState_CStr(buffer_size); + LOG(TRACE) << "Input buffer: " << buffer; + + // read from elements out of buffer + streaming::read_g2_elements_from_buffer(elems, buffer, element_size); + for (size_t i = 0; i < N; i++) + { + elems[i].to_affine_coordinates(); + result.emplace_back(elems[i]); + } + streaming::write_g2_elements_to_buffer(result, out_buffer); + + ASSERT(memcmp(buffer, out_buffer, buffer_size)) + << "Input buffer " << buffer << " is not equal to output buffer " << out_buffer; +} + + +/* Streaming_BoringWriteG2ElemsToBuffer + * + * Concrete test vector for writing random G2 elements to + * a buffer, reading and then validating them. + */ +TEST(Streaming, BoringWriteG2ElemsToBuffer) +{ + constexpr size_t N = 100; + constexpr size_t element_size = sizeof(Fqe) * 2; + constexpr size_t buffer_size = N * element_size; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector expected; + result.reserve(N); + expected.reserve(N); + + char buffer[buffer_size]; + + for (size_t i = 0; i < N; ++i) + { + G2 point = G2::random_element(); + point.to_affine_coordinates(); + expected.emplace_back(point); + } + streaming::write_g2_elements_to_buffer(expected, buffer); + + streaming::read_g2_elements_from_buffer(result, buffer, buffer_size); + for (size_t i = 0; i < N; ++i) + { + test_utils::validate_g2_point(result[i], expected[i]); + } +} + + +/* Streaming_WriteG2ElemToBuffer + * + * Tests writing a single generated G2 element to a buffer, + * and then reading and validating it. 
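+ *
+ * Size note (assuming the 32-byte Fq asserted in BoringVariableSizeTests): an affine
+ * G2 coordinate lives in the quadratic extension Fqe, so one serialized element here
+ * occupies sizeof(Fqe) * 2 = 128 bytes.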
+ */ +TEST(Streaming, WriteG2ElemToBuffer) +{ + constexpr size_t N = 1; + constexpr size_t element_size = sizeof(Fqe) * 2; + constexpr size_t buffer_size = N * element_size; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector result; + std::vector expected; + result.reserve(N); + expected.reserve(N); + + char buffer[buffer_size]; + + // generate Jacobian coordinates for G2 element + G2 point = test_utils::DeepState_G2(); + point.to_affine_coordinates(); + expected.emplace_back(point); + + streaming::write_g2_elements_to_buffer(expected, buffer); + streaming::read_g2_elements_from_buffer(result, buffer, buffer_size); + + test_utils::validate_g2_point(result[0], expected[0]); +} + + +/* Streaming_BoringReadWriteTranscripts + * + * Concrete test for generating random element points, + * writing and reading them to temporary transcript files, + * and validating the points. + */ +TEST(Streaming, BoringReadWriteTranscripts) +{ + constexpr size_t G1_N = 100; + constexpr size_t G2_N = 2; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector g1_result; + std::vector g1_expected; + std::vector g2_result; + std::vector g2_expected; + streaming::Manifest manifest; + + manifest.transcript_number = 0; + manifest.total_transcripts = 1; + manifest.total_g1_points = G1_N; + manifest.total_g2_points = G2_N; + manifest.num_g1_points = G1_N; + manifest.num_g2_points = G2_N; + manifest.start_from = 0; + + for (size_t i = 0; i < G1_N; ++i) + { + G1 g1_point = G1::random_element(); + g1_point.to_affine_coordinates(); + g1_expected.emplace_back(g1_point); + } + + for (size_t i = 0; i < G2_N; ++i) + { + G2 g2_point = G2::random_element(); + g2_point.to_affine_coordinates(); + g2_expected.emplace_back(g2_point); + } + + streaming::write_transcript(g1_expected, g2_expected, manifest, "/tmp/rwt_test"); + streaming::read_transcript(g1_result, g2_result, manifest, "/tmp/rwt_test"); + + for (size_t i = 0; i < G1_N; ++i) + { + test_utils::validate_g1_point(g1_result[i], g1_expected[i]); + } + + for (size_t i = 0; i < G2_N; ++i) + { + test_utils::validate_g2_point(g2_result[i], g2_expected[i]); + } +} + + +/* Streaming_ReadWriteTranscripts + * + * Test for generating arbitrary element points, + * writing and reading them to temporary transcript files, + * and validating the points. 
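+ *
+ * The Manifest is filled for the single-transcript case, so the total_* fields match
+ * the per-transcript num_* fields and start_from is 0; presumably total_* describe the
+ * whole ceremony while num_* and start_from describe the slice held by this one file.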
+ */ +TEST(Streaming, ReadWriteTranscripts) +{ + constexpr size_t G1_N = 10; + constexpr size_t G2_N = 2; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector g1_result; + std::vector g1_expected; + std::vector g2_result; + std::vector g2_expected; + streaming::Manifest manifest; + + manifest.transcript_number = 0; + manifest.total_transcripts = 1; + manifest.total_g1_points = G1_N; + manifest.total_g2_points = G2_N; + manifest.num_g1_points = G1_N; + manifest.num_g2_points = G2_N; + manifest.start_from = 0; + + for (size_t i = 0; i < G1_N; ++i) + { + G1 g1_point = test_utils::DeepState_G1(); + g1_point.to_affine_coordinates(); + g1_expected.emplace_back(g1_point); + } + + for (size_t i = 0; i < G2_N; ++i) + { + G2 g2_point = test_utils::DeepState_G2(); + g2_point.to_affine_coordinates(); + g2_expected.emplace_back(g2_point); + } + + streaming::write_transcript(g1_expected, g2_expected, manifest, "/tmp/rwt_test"); + streaming::read_transcript(g1_result, g2_result, manifest, "/tmp/rwt_test"); + + for (size_t i = 0; i < G1_N; ++i) + { + test_utils::validate_g1_point(g1_result[i], g1_expected[i]); + } + + for (size_t i = 0; i < G2_N; ++i) + { + test_utils::validate_g2_point(g2_result[i], g2_expected[i]); + } +} + + +/* Streaming_WriteReadFieldElementsFile + * + * Test for generating for comparing original and resultant field + * elements after writing and reading from temporary file. + */ +TEST(Streaming, WriteReadFieldElementsFile) +{ + libff::alt_bn128_pp::init_public_params(); + + std::vector original; + std::vector result; + + //Fr point = (Fr) DeepState_UInt64(); + Fr point = test_utils::DeepState_Fe(); + original.emplace_back(point); + + // write and read from temporary file back to field elem vector + streaming::write_field_elements_to_file(original, "/tmp/fe_test"); + streaming::read_field_elements_from_file(result, "/tmp/fe_test", 1); + + // read will add extra elem to result, so do check of first elem + ASSERT_EQ(original.size() + 1, result.size()) + << "original vec size + 1 not equivalent to size of result"; + ASSERT(original[0] == result[0]) + << "Resultant field elem not equal to original field elem"; +} + + +/* Streaming_ReadTranscriptG1Points + * + * Test for generating and validating G1 elements that + * written and read from a temporary transcript. + */ +TEST(Streaming, ReadTranscriptG1Points) +{ + constexpr size_t G1_N = 1; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::alt_bn128_pp::init_public_params(); + + std::vector g1_x; + std::vector g1_out; + std::vector g2_x = {}; + + // generate initial transcript manifest + streaming::Manifest manifest; + manifest.transcript_number = 0; + manifest.total_transcripts = 1; + manifest.total_g1_points = G1_N; + manifest.total_g2_points = 0; + manifest.num_g1_points = G1_N; + manifest.num_g2_points = 0; + manifest.start_from = 0; + + for (size_t i = 0; i < G1_N; ++i) + { + G1 g1_point = test_utils::DeepState_G1(); + g1_point.to_affine_coordinates(); + g1_x.emplace_back(g1_point); + } + + streaming::write_transcript(g1_x, g2_x, manifest, "/tmp/g1_test"); + streaming::read_transcript_g1_points(g1_out, "/tmp/g1_test", 0, G1_N); + + for (size_t i = 0; i < G1_N; ++i) + { + test_utils::validate_g1_point(g1_x[i], g1_out[i]); + } +} + + +/* Streaming_ReadTranscriptG2Points + * + * Test for generating and validating G2 elements that + * written and read from a temporary transcript. 
+ */ +TEST(Streaming, ReadTranscriptG2Points) +{ + constexpr size_t G2_N = 1; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::alt_bn128_pp::init_public_params(); + + std::vector g1_x = {}; + std::vector g2_x; + std::vector g2_out; + + // generate initial transcript manifest + streaming::Manifest manifest; + manifest.transcript_number = 0; + manifest.total_transcripts = 1; + manifest.total_g1_points = 0; + manifest.total_g2_points = G2_N; + manifest.num_g1_points = 0; + manifest.num_g2_points = G2_N; + manifest.start_from = 0; + + for (size_t i = 0; i < G2_N; ++i) + { + G2 g2_point = test_utils::DeepState_G2(); + g2_point.to_affine_coordinates(); + g2_x.emplace_back(g2_point); + } + + streaming::write_transcript(g1_x, g2_x, manifest, "/tmp/g2_test"); + streaming::read_transcript_g2_points(g2_out, "/tmp/g2_test", 0, G2_N); + + for (size_t i = 0; i < G2_N; ++i) + { + test_utils::validate_g2_point(g2_x[i], g2_out[i]); + } +} diff --git a/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp b/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp new file mode 100644 index 0000000..3ae6531 --- /dev/null +++ b/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp @@ -0,0 +1,140 @@ +#include +#include + +#include +#include +#include + +#include +#include +#include +#include + +#include "test_utils.hpp" + +#include + +using namespace deepstate; + + +/* Range_GeneratorPolynomial + * + * Tests generating polynomial coefficients for + * varying degrees and comparing results when written + * and read as field elements from a transcript file + */ +TEST(Range, GeneratorPolynomial) +{ + size_t DEGREE = (size_t) DeepState_MinUShort(1); + LOG(TRACE) << "Testing with polynomial degree " << DEGREE; + + libff::alt_bn128_pp::init_public_params(); + + std::vector res1; + std::vector res2; + + // compute generator polynomial and store coefficients + std::vector>> subproduct_tree; + libfqfft::compute_subproduct_tree(log2(DEGREE), subproduct_tree); + res1 = subproduct_tree[log2(DEGREE)][0]; + + // call generator function, and read stored coefficients from transcript file + generator::compute_generator_polynomial>(DEGREE); + streaming::read_field_elements_from_file(res2, "../setup_db/generator.dat", DEGREE); + + // check each indiidual element, res2 may have larger buffer size + for (size_t i = 0; i < res1.size(); i++) { + ASSERT(res1[i] == res2[i]) + << "Generator polynomial coefficients not equal"; + } +} + + +/* Range_RangePolynomials + * + * Tests compute_range_polynomials for computing AZTEC signature + * points after computing a generator polynomial and a round of + * MPC setup. 
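+ *
+ * Roughly, the steps driven below are: compute the generator polynomial (the same
+ * coefficients the GeneratorPolynomial test derives from libfqfft's subproduct tree),
+ * run one setup round to produce ../setup_db/transcript0_out.dat, read the polynomial
+ * and transcript back (prepending G1::one() to the g1_x powers), write the *_prep.dat
+ * files that the range computation memory-maps, and call compute_range_polynomials().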
+ */ +TEST(Range, RangePolynomials) +{ + constexpr size_t DEGREE = 0x10000; + const size_t range_index = DeepState_MinInt(1); + + libff::alt_bn128_pp::init_public_params(); + + // generate coeffs for generator polynomial + generator::compute_generator_polynomial>(DEGREE); + + // run setup to produce an initial transcript + run_setup("../setup_db", range_index, 1); + + // produce output for memory mapping within compute_range_polynomials + std::vector generator_polynomial; + std::vector g1_x(DEGREE); + std::vector g2_x(DEGREE); + streaming::Manifest manifest; + + streaming::read_field_elements_from_file(generator_polynomial, "../setup_db/generator.dat", DEGREE + 1); + streaming::read_transcript(g1_x, g2_x, manifest, "../setup_db/transcript0_out.dat"); + g1_x.insert(g1_x.begin(), G1::one()); + + { + std::ofstream file("../setup_db/generator_prep.dat"); + file.write((char *)&generator_polynomial[0], generator_polynomial.size() * sizeof(Fr)); + } + + { + std::ofstream file("../setup_db/g1_x_prep.dat"); + file.write((char *)&g1_x[0], g1_x.size() * sizeof(G1)); + } + + // calculate signature point for rangeproofs + compute_range_polynomials(range_index, DEGREE); +} + + +TEST(Range, Window) +{ + constexpr size_t DEGREE = 0x100; + + libff::init_alt_bn128_params(); + + // initialize generator polynomial + std::vector>> subproduct_tree; + libfqfft::compute_subproduct_tree(log2(DEGREE), subproduct_tree); + std::vector generator_polynomial = subproduct_tree[log2(DEGREE)][0]; + + libff::alt_bn128_Fr x = test_utils::DeepState_Fe(); + libff::alt_bn128_Fr accumulator = x; + + std::vector g1_x; + g1_x.reserve(DEGREE + 1); + g1_x.emplace_back(libff::alt_bn128_G1::one()); + for (size_t i = 1; i < DEGREE + 1; ++i) + { + libff::alt_bn128_G1 pt = libff::alt_bn128_G1::one(); + pt = accumulator * pt; + g1_x.emplace_back(pt); + accumulator *= x; + } + + libff::alt_bn128_G1 h = libff::alt_bn128_G1::zero(); + for (size_t i = 0; i < generator_polynomial.size(); ++i) + { + libff::alt_bn128_G1 pt = generator_polynomial[i] * g1_x[i]; + h = h + pt; + } + + for (size_t i = 0; i < DEGREE; ++i) + { + libff::alt_bn128_Fr fa; + libff::alt_bn128_G1 process_result = process_range(i, fa, &g1_x[0], &generator_polynomial[0], 0, DEGREE); + libff::alt_bn128_G1 t0 = x * process_result; + libff::alt_bn128_G1 t1 = (-libff::alt_bn128_Fr(i)) * process_result; + libff::alt_bn128_G1 result = t0 + t1; + result.to_affine_coordinates(); + h.to_affine_coordinates(); + test_utils::validate_g1_point<4>(result, h); + } +} diff --git a/setup-tools/test/deepstate-tests/test_entry_points.cpp b/setup-tools/test/deepstate-tests/test_entry_points.cpp new file mode 100644 index 0000000..9b75097 --- /dev/null +++ b/setup-tools/test/deepstate-tests/test_entry_points.cpp @@ -0,0 +1,247 @@ +#include + +#include + +#include +#include + +#include + +#define TRANSCRIPT_PATH "../setup_db" + +using namespace deepstate; + + +class EntryPoint: public Test { + public: + + size_t num_g1_points; + size_t num_g2_points; + + size_t progress; + + // each test we run requires interacting with actual transcripts, so this + // setup constructor ensures that we create a temporary transcript path. 
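+    //
+    // In outline, SetUp mirrors run_setup with SIMULATE_PARTICIPANT: hash the checksums
+    // of any existing transcripts with BLAKE2b, fold that digest into an Fr secret, and
+    // use it as the multiplicand when computing the initial (or each existing) transcript.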
+ void SetUp(void) + { + LOG(TRACE) << "Initializing setup for entry point test"; + + std::string transcript_dir(TRANSCRIPT_PATH); + + // function should check inodes regardless of filetype + if (!streaming::is_file_exist(transcript_dir)) + { + LOG(ERROR) << "Transcript directory doesn't exist."; + } + + // if tests run simultaneous, kill if transcript dir and transcript 0 exists + std::string _zero_transcript(TRANSCRIPT_PATH); + _zero_transcript += "/transcript0_out.dat"; + if (streaming::is_file_exist(_zero_transcript)) + { + LOG(TRACE) << "Transcript path exists and is populated."; + Test(); + } + + libff::alt_bn128_pp::init_public_params(); + + num_g1_points = DeepState_IntInRange(1, 250000); + num_g2_points = DeepState_IntInRange(1, 100); + + // emulates simplified functionality of `run_setup` without unused/uncovered functionality. + // we will force the test to use a simulated participant. + + std::vector checksums; + size_t num = 0; + std::string filename = getTranscriptInPath(transcript_dir, num); + + while (streaming::is_file_exist(filename)) + { + std::vector checksum = streaming::read_checksum(filename); + checksums.insert(checksums.end(), checksum.begin(), checksum.end()); + ++num; + filename = getTranscriptInPath(transcript_dir, num); + } + + char checksum_of_checksums[checksum::BLAKE2B_CHECKSUM_LENGTH] = {0}; + checksum::create_checksum(&checksums[0], checksums.size(), &checksum_of_checksums[0]); + + Fr simulated_secret = utils::convert_buffer_to_field_element(&checksum_of_checksums[0], checksum::BLAKE2B_CHECKSUM_LENGTH); + Secret multiplicand(simulated_secret); + + if (num_g1_points > 0) + { + compute_initial_transcripts(transcript_dir, num_g1_points, num_g2_points, \ + POINTS_PER_TRANSCRIPT, multiplicand, progress); + } + else + { + size_t num = 0; + std::string filename = getTranscriptInPath(transcript_dir, num); + while (streaming::is_file_exist(filename)) + { + compute_existing_transcript(transcript_dir, num, multiplicand, progress); + filename = getTranscriptInPath(transcript_dir, ++num); + } + } + } + + void TearDown(void) { + LOG(TRACE) << "Tearing down setup for entry point test"; + // clear setup_db/ or other transcript path + } +}; + + +/* EntryPoint_ValidateSetup + * + * Test fixture for validating points parsed out of written transcript + * file from fixture SetUp constructor. Equivalent to verify-set functionality, + * but with additional sanity checks. 
+ */ +TEST_F(EntryPoint, ValidateSetup) +{ + std::vector g1_x; + std::vector g2_x; + + streaming::Manifest manifest; + + std::string transcript_path = TRANSCRIPT_PATH; + transcript_path += "/transcript0_out.dat"; + + // check by reading out points and validating + streaming::read_transcript_g1_points(g1_x, transcript_path, 0, 1); + streaming::read_transcript_g2_points(g2_x, transcript_path, 0, 1); + ASSERT(g1_x.size() || g2_x.size()) + << "Missing either G1 or G2 zero point."; + ASSERT(g1_x.size() == num_g1_points) + << "Number of G1 points parsed not equal to original size " << num_g1_points; + ASSERT(g2_x.size() == num_g2_points) + << "Number of G2 points parsed not equal to original size " << num_g2_points; + + // check by validating manifest contents + streaming::read_transcript_manifest(manifest, transcript_path); + ASSERT(manifest.total_g1_points == num_g1_points) + << "Manifest contains wrong total number of G1 points."; + ASSERT(manifest.total_g2_points == num_g2_points) + << "Manifest contains wrong total number of G2 points."; +} + + +/* EntryPoint_VerifyFirst + * + * Test fixture for verifying the first transcript generated by a first participant + * in trusted setup. Includes functionality from ValidateSetup in order to enforce + * necessary sanity checks and maximize test coverage. + */ +TEST_F(EntryPoint, VerifyFirst) +{ + std::string transcript_path = TRANSCRIPT_PATH; + transcript_path += "/transcript0_out.dat"; + + ASSERT(streaming::is_file_exist(transcript_path)) + << "Transcript not found in path: " << transcript_path; + + streaming::Manifest manifest; + + std::vector g1_x; + std::vector g2_x; + std::vector g1_0_0; + std::vector g2_0_0; + std::vector g1_x_previous; + std::vector g2_y; + + // Read first points from transcript 0. + streaming::read_transcript_g1_points(g1_0_0, transcript_path, 0, 1); + streaming::read_transcript_g2_points(g2_0_0, transcript_path, 0, 1); + + ASSERT(g1_0_0.size() || g2_0_0.size()) + << "Missing either G1 or G2 zero point."; + + streaming::read_transcript_manifest(manifest, transcript_path); + + // If we are transcript 0 we need to add the generator point to the beginning of the series. + // This allows validating a single point as there will be at least 2 in the series. + g1_x.push_back(G1::one()); + g2_x.push_back(G2::one()); + + // First participant, first transcript. Discard our g2^y point. + ASSERT(manifest.transcript_number == 0) + << "Transcript read not an initial transcript from first participant."; + + streaming::read_transcript(g1_x, g2_x, manifest, transcript_path); + g2_x.pop_back(); + + LOG(TRACE) << "Verifying and validating transcripts"; + validate_transcript(g1_0_0[0], g2_0_0[0], g1_x, g2_x, g1_x_previous, g2_y); + LOG(TRACE) << "Transcript is valid"; +} + + +/* EntryPoint_VerifyPrevious + * + * Test fixture for verification of a current transcript and manifest + * as well as validating against previous and transcript 0. 
+TEST_F(EntryPoint, DISABLED_VerifyPrevious) +{ + // initialize paths from common transcript path + std::string transcript = TRANSCRIPT_PATH; + transcript += "/transcript1_out.dat"; + std::string previous_transcript(TRANSCRIPT_PATH); + previous_transcript += "/transcript0_out.dat"; + std::string first_transcript(TRANSCRIPT_PATH); + first_transcript += "/transcript0_out.dat"; + + ASSERT(streaming::is_file_exist(transcript)) + << "Transcript not found in path: " << transcript; + ASSERT(streaming::is_file_exist(previous_transcript)) + << "Previous transcript not found in path: " << previous_transcript; + ASSERT(streaming::is_file_exist(first_transcript)) + << "Transcript 0 not found in path: " << first_transcript; + + streaming::Manifest manifest; + streaming::Manifest previous_manifest; + + std::vector g1_x; + std::vector g2_x; + std::vector g1_0_0; + std::vector g2_0_0; + std::vector g1_x_previous; + std::vector g2_y; + + // Read first points from transcript 0. + streaming::read_transcript_g1_points(g1_0_0, first_transcript, 0, 1); + streaming::read_transcript_g2_points(g2_0_0, first_transcript, 0, 1); + + ASSERT(g1_0_0.size() || g2_0_0.size()) + << "Missing either G1 or G2 zero point."; + + streaming::read_transcript_manifest(manifest, transcript); + streaming::read_transcript_manifest(previous_manifest, previous_transcript); + validate_manifest(previous_manifest, manifest); + + // If this transcript and previous transcript are 0, we are going to check this transcript was built + // on top of the previous participants using the g2^y and previous g1_x points. + if (manifest.transcript_number == 0 && previous_manifest.transcript_number == 0) + { + streaming::read_transcript_g1_points(g1_x_previous, previous_transcript, 0, 1); + streaming::read_transcript(g1_x, g2_x, manifest, transcript); + // Extract g2_y point from this transcript. + g2_y.push_back(g2_x.back()); + g2_x.pop_back(); + } + else + { + // Read the last points from the previous transcript to validate the sequence. + // Second to last g2 point if the previous transcript is 0, due to g2^y being tacked on. + streaming::read_transcript_g1_points(g1_x, previous_transcript, -1, 1); + size_t from_g2_end = previous_manifest.transcript_number == 0 ? -2 : -1; + streaming::read_transcript_g2_points(g2_x, previous_transcript, from_g2_end, 1); + streaming::read_transcript(g1_x, g2_x, manifest, transcript); + } + + LOG(TRACE) << "Verifying and validating transcripts"; + validate_transcript(g1_0_0[0], g2_0_0[0], g1_x, g2_x, g1_x_previous, g2_y); + LOG(TRACE) << "Transcript is valid"; +} +*/ diff --git a/setup-tools/test/deepstate-tests/test_setup.cpp b/setup-tools/test/deepstate-tests/test_setup.cpp new file mode 100644 index 0000000..5c0a0d3 --- /dev/null +++ b/setup-tools/test/deepstate-tests/test_setup.cpp @@ -0,0 +1,356 @@ +#include +#include +#include +#include "test_utils.hpp" + +#include + +using namespace deepstate; + + +/* Setup_BoringBatchNormalizeWorks + * + * Concrete test for generating a vector of + * random G1 Jacobian points, normalizing to affine + * coordinates and comparing. 
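+ *
+ * (For reference: an affine point (x, y) is recovered from Jacobian coordinates
+ * (X, Y, Z) as x = X / Z^2 and y = Y / Z^3; utils::batch_normalize is expected to
+ * perform this conversion for the whole vector, sharing the field inversions.)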
+ */ +TEST(Setup, BoringBatchNormalizeWorks) +{ + constexpr size_t N = 100; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector points; + std::vector normalized; + std::vector dummy; + + points.reserve(N); + normalized.reserve(N); + + for (size_t i = 0; i < N; ++i) + { + G1 point = G1::random_element(); + points.emplace_back(point); + normalized.emplace_back(point); + dummy.emplace_back(point); + } + + utils::batch_normalize(0, N, &normalized[0], &dummy[0]); + for (size_t i = 0; i < N; ++i) + { + points[i].to_affine_coordinates(); + test_utils::validate_g1_point(points[i], normalized[i]); + } +} + + +/* Setup_BatchNormalize + * + * Testing normalization to affine coordinate using a + * single G1 point. + */ +TEST(Setup, BatchNormalize) +{ + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector points; + std::vector normalized; + std::vector dummy; + + points.reserve(1); + normalized.reserve(1); + + // generate one point per run + G1 point = test_utils::DeepState_G1(); + + points.emplace_back(point); + normalized.emplace_back(point); + dummy.emplace_back(point); + + utils::batch_normalize(0, 1, &normalized[0], &dummy[0]); + + // convert jacobian point to affine coordinates, and compare + points[0].to_affine_coordinates(); + test_utils::validate_g1_point(points[0], normalized[0]); +} + + +/* Setup_BoringSameRatioPreprocess + * + * Tests polynomial evaluation validation using + * generated random field element and a pairing check. + */ +TEST(Setup, BoringSameRatioPreprocess) +{ + libff::init_alt_bn128_params(); + + size_t N = 100; + std::vector points; + points.reserve(N); + + Fr y = Fr::random_element(); + Fr accumulator = y; + + for (size_t i = 0; i < N; ++i) + { + points.emplace_back(accumulator * G1::one()); + accumulator = accumulator * y; + } + + VerificationKey g1_key; + VerificationKey g2_key; + + g2_key.lhs = y * G2::one(); + g2_key.rhs = G2::one(); + + g1_key = same_ratio_preprocess(points); + ASSERT(same_ratio(g1_key, g2_key) == true); +} + + +/* Setup_SameRatioPreprocess + * + * Tests polynomial evaluation validation using + * generated field element and a pairing check. + */ +TEST(Setup, SameRatioPreprocess) +{ + libff::init_alt_bn128_params(); + + size_t N = 3; + std::vector points; + points.reserve(N); + + Fr y = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing with input value: " << y.as_bigint().as_ulong(); + Fr accumulator = y; + + for (size_t i = 0; i < 3; ++i) + { + points.emplace_back(accumulator * G1::one()); + accumulator = accumulator * y; + } + + VerificationKey g1_key; + VerificationKey g2_key; + + g2_key.lhs = y * G2::one(); + g2_key.rhs = G2::one(); + + g1_key = same_ratio_preprocess(points); + ASSERT(same_ratio(g1_key, g2_key) == true); +} + + +/* Setup_SameRatio1 + * + * Test to validate that g1_key.lhs * g2.lhs is equal to + * g1_key.rhs * g2_key.rhs using a generated ratio element. 
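+ *
+ * In pairing terms the property checked is (roughly)
+ *   e(g1_key.lhs, g2_key.lhs) == e(g1_key.rhs, g2_key.rhs),
+ * which holds here because g1_key.rhs = ratio * g1_key.lhs and
+ * g2_key.lhs = ratio * g2_key.rhs.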
+ */ +TEST(Setup, SameRatio1) +{ + libff::init_alt_bn128_params(); + + // initialize a constant field elem with arbitrary value + Fr x = Fr("1444073846434098342"); + + Fr ratio = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing same_ratio with input value " << ratio.as_bigint().as_ulong(); + + VerificationKey g1_key; + g1_key.lhs = x * G1::one(); + g1_key.rhs = ratio * g1_key.lhs; + + VerificationKey g2_key; + g2_key.lhs = ratio * G2::one(); + g2_key.rhs = G2::one(); + + ASSERT(same_ratio(g1_key, g2_key) == true) + << "verification key g1 and g2 do not share same ratio"; +} + + +/* Setup_SameRatio2 + * + * Test to validate that g1_key.lhs * g2.lhs is equal to + * g1_key.rhs * g2_key.rhs using a generated ratio element. + */ +TEST(Setup, SameRatio2) +{ + libff::init_alt_bn128_params(); + + Fr x = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing same_ratio with input value " << x.as_bigint().as_ulong(); + + Fr ratio = Fr("1444073846434098342"); + + VerificationKey g1_key; + g1_key.lhs = x * G1::one(); + g1_key.rhs = ratio * g1_key.lhs; + + VerificationKey g2_key; + g2_key.lhs = ratio * G2::one(); + g2_key.rhs = G2::one(); + + ASSERT(same_ratio(g1_key, g2_key) == true) + << "verification key g1 and g2 do not share same ratio"; +} + + +/* Setup_BoringValidatePolynomialEvaluation + * + * Validates that generated vector of points + * represents powering sequence based on comparator + * element. + */ +TEST(Setup, BoringValidatePolynomialEvaluation) +{ + constexpr size_t N = 100; + + libff::init_alt_bn128_params(); + + std::vector points; + points.reserve(N); + + Fr y = Fr::random_element(); + Fr accumulator = y; + + for (size_t i = 0; i < 100; ++i) + { + points.emplace_back(accumulator * G1::one()); + accumulator = accumulator * y; + } + + G2 comparator = y * G2::one(); + ASSERT(validate_polynomial_evaluation(points, comparator) == true); +} + + +/* Setup_ValidatePolynomialEvaluation + * + * Validates generated vectors of points + * represents powering sequence based on a generated + * comparator value. + */ +TEST(Setup, ValidatePolynomialEvaluation) +{ + constexpr size_t N = 3; + + libff::init_alt_bn128_params(); + + std::vector points; + points.reserve(N); + + Fr y = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing with input value: " << y.as_bigint().as_ulong(); + Fr accumulator = y; + + for (size_t i = 0; i < N; ++i) + { + points.emplace_back(accumulator * G1::one()); + accumulator = accumulator * y; + } + + G2 comparator = y * G2::one(); + ASSERT(validate_polynomial_evaluation(points, comparator) == true); +} + + +/* Setup_BoringValidateTranscript + * + * Tests that a transcript matches powering sequences for + * structured reference string. 
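+ *
+ * The fabricated data below mirrors the ceremony shape: a "previous" participant
+ * contributes powering sequences of one secret in G1 and G2, a second secret is folded
+ * in on top of those points, and g2^y records that second secret so validate_transcript
+ * can tie the new transcript back to the previous one.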
+ */ +TEST(Setup, BoringValidateTranscript) +{ + constexpr size_t N = 100; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector g1_x_prev, g1_x; + std::vector g2_x_prev, g2_x; + G2 g2_y; + + { + Fr y = Fr::random_element(); + Fr accumulator = y; + for (size_t i = 0; i < N; ++i) + { + g1_x_prev.emplace_back(accumulator * G1::one()); + g2_x_prev.emplace_back(accumulator * G2::one()); + + accumulator = accumulator * y; + } + } + + { + Fr y = Fr::random_element(); + Fr accumulator = y; + for (size_t i = 0; i < N; ++i) + { + g1_x.emplace_back(accumulator * g1_x_prev[i]); + g2_x.emplace_back(accumulator * g2_x_prev[i]); + + accumulator = accumulator * y; + } + g2_y = libff::fixed_window_wnaf_exp(5, G2::one(), y.as_bigint()); + } + + ASSERT(validate_transcript(g1_x[0], g2_x[0], g1_x, g2_x, {g1_x_prev[0]}, {g2_y}) == true) + << "Transcript validation failed"; +} + + +/* Setup_ValidateTranscript + * + * Tests that a transcript matches powering sequences for + * structured reference string. + */ +TEST(Setup, ValidateTranscript) +{ + constexpr size_t N = 10; + constexpr size_t num_limbs = sizeof(Fq) / GMP_NUMB_BYTES; + + libff::init_alt_bn128_params(); + + std::vector g1_x_prev, g1_x; + std::vector g2_x_prev, g2_x; + G2 g2_y; + + { + Fr y = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing with input accumulator value: " << y.as_bigint().as_ulong(); + Fr accumulator = y; + + for (size_t i = 0; i < N; ++i) + { + g1_x_prev.emplace_back(accumulator * G1::one()); + g2_x_prev.emplace_back(accumulator * G2::one()); + + accumulator = accumulator * y; + } + } + + + { + Fr y = test_utils::DeepState_Fe(); + LOG(TRACE) << "Testing with input accumulator value: " << y.as_bigint().as_ulong(); + Fr accumulator = y; + + for (size_t i = 0; i < N; ++i) + { + g1_x.emplace_back(accumulator * g1_x_prev[i]); + g2_x.emplace_back(accumulator * g2_x_prev[i]); + + accumulator = accumulator * y; + } + g2_y = libff::fixed_window_wnaf_exp(5, G2::one(), y.as_bigint()); + } + + ASSERT(validate_transcript(g1_x[0], g2_x[0], g1_x, g2_x, {g1_x_prev[0]}, {g2_y}) == true) + << "Transcript validation failed"; +} diff --git a/setup-tools/test/deepstate-tests/test_utils.hpp b/setup-tools/test/deepstate-tests/test_utils.hpp new file mode 100644 index 0000000..5b8813d --- /dev/null +++ b/setup-tools/test/deepstate-tests/test_utils.hpp @@ -0,0 +1,102 @@ +#pragma once + +#include "stddef.h" + +#include +#include + +#include + +using namespace deepstate; +using Fq2 = libff::alt_bn128_Fq2; + +namespace test_utils +{ + + +/* re-implementation of convert_buffer_to_field_element but with DeepState input generation */ +template +DEEPSTATE_INLINE FieldT DeepState_Fe(void) +{ + constexpr size_t bytes_per_element = sizeof(FieldT); + constexpr size_t num_limbs = bytes_per_element / GMP_NUMB_BYTES; + + uint8_t * _buffer = (uint8_t *) DeepState_CStr(bytes_per_element); + std::vector buffer(_buffer, _buffer + sizeof(_buffer)); + + FieldT element; + auto element_bigint = element.as_bigint(); + for (size_t i = 0; i < sizeof(buffer); i += bytes_per_element) + { + mp_limb_t *element_ptr = (mp_limb_t *)((char *)(&*buffer.begin()) + 1); + for (size_t j = 0; j < num_limbs; ++j) + { + mp_limb_t limb = element_ptr[j]; + if (streaming::isLittleEndian()) + { + limb = __builtin_bswap64(limb); + } + element_bigint.data[j] = limb; + } + } + return FieldT(element_bigint); +} + + +/* helper for generating G1 element with Jacobian coordinates */ +DEEPSTATE_INLINE G1 DeepState_G1(void) +{ + Fq x = 
DeepState_Fe(); + Fq y = DeepState_Fe(); + Fq z = DeepState_Fe(); + return G1(x, y, z); +} + + +/* helper for generating G2 element with Jacobian coordinates */ +DEEPSTATE_INLINE G2 DeepState_G2(void) +{ + Fq2 x = Fq2(DeepState_Fe(), DeepState_Fe()); + Fq2 y = Fq2(DeepState_Fe(), DeepState_Fe()); + Fq2 z = Fq2(DeepState_Fe(), DeepState_Fe()); + return G2(x, y, z); +} + + +template +void validate_g1_point(libff::alt_bn128_G1 &result, libff::alt_bn128_G1 &expected) +{ + libff::bigint result_x = result.X.as_bigint(); + libff::bigint result_y = result.Y.as_bigint(); + libff::bigint expected_x = expected.X.as_bigint(); + libff::bigint expected_y = expected.Y.as_bigint(); + + for (size_t i = 0; i < N; ++i) + { + ASSERT_EQ(result_x.data[i], expected_x.data[i]); + ASSERT_EQ(result_y.data[i], expected_y.data[i]); + } +} + +template +void validate_g2_point(libff::alt_bn128_G2 &result, libff::alt_bn128_G2 &expected) +{ + libff::bigint result_x0 = result.X.c0.as_bigint(); + libff::bigint result_y0 = result.Y.c0.as_bigint(); + libff::bigint result_x1 = result.X.c1.as_bigint(); + libff::bigint result_y1 = result.Y.c1.as_bigint(); + + libff::bigint expected_x0 = expected.X.c0.as_bigint(); + libff::bigint expected_y0 = expected.Y.c0.as_bigint(); + libff::bigint expected_x1 = expected.X.c1.as_bigint(); + libff::bigint expected_y1 = expected.Y.c1.as_bigint(); + + for (size_t i = 0; i < N; ++i) + { + ASSERT_EQ(result_x0.data[i], expected_x0.data[i]); + ASSERT_EQ(result_y0.data[i], expected_y0.data[i]); + ASSERT_EQ(result_x1.data[i], expected_x1.data[i]); + ASSERT_EQ(result_y1.data[i], expected_y1.data[i]); + } +} +} // namespace utils From 7db17c777bdd742ab079068847435de829f20558 Mon Sep 17 00:00:00 2001 From: ex0dus-0x Date: Mon, 30 Sep 2019 11:09:10 -0400 Subject: [PATCH 2/2] Fix and clean up tests to pass --- .../deepstate-tests/test_aztec_common.cpp | 87 ------------------- .../test_compute_range_polynomial.cpp | 22 +++-- 2 files changed, 10 insertions(+), 99 deletions(-) diff --git a/setup-tools/test/deepstate-tests/test_aztec_common.cpp b/setup-tools/test/deepstate-tests/test_aztec_common.cpp index 6c2e317..1914727 100644 --- a/setup-tools/test/deepstate-tests/test_aztec_common.cpp +++ b/setup-tools/test/deepstate-tests/test_aztec_common.cpp @@ -130,49 +130,6 @@ TEST(Streaming, ReadG1ElemsToBuffer) } -/* Streaming_BoringReadG1ElemsFromBufferFile - * - * Concrete test vector for testing reading empty - * transcript file to G1 element, writing to output buffer - * and comparing -TEST(Streaming, DISABLED_BoringReadG1ElemsFromBufferFile) -{ - // transcript file to tests should actually exist - std::string transcript_path("./transcript/transcript00.dat"); - if (!streaming::is_file_exist(transcript_path)) { - LOG(ERROR) << "Transcript path: " << transcript_path << " should be \ - manually initialized with empty transcript file"; - } - - constexpr size_t buffer_size = sizeof(Fq) * 2; - - libff::init_alt_bn128_params(); - - std::vector result; - std::vector elems; - - char buffer[buffer_size]; - char out_buffer[buffer_size]; - - // read to std::vec, convert to char * buffer with std::copy - auto _buffer = streaming::read_file_into_buffer(transcript_path); - LOG(TRACE) << "Input buffer size: " << _buffer.size(); - LOG(TRACE) << "Expected buffer size: " << buffer_size; - - std::copy(_buffer.begin(), _buffer.end(), buffer); - - // read contents from file to G1 element - streaming::read_g1_elements_from_buffer(elems, buffer, buffer_size); - elems[0].to_affine_coordinates(); - result.emplace_back(elems[0]); - - // 
write back to output buffer and compare - streaming::write_g1_elements_to_buffer(result, out_buffer); - ASSERT(memcmp(buffer, out_buffer, buffer_size)) - << "out_buffer contents: " << out_buffer; -} - */ - /* Streaming_BoringWriteG1ElemsToBuffer * * Concrete test vector for writing random G1 elements to @@ -243,50 +200,6 @@ TEST(Streaming, WriteG1ElemToBuffer) } -/* Streaming_BoringReadG2ElemsFromBufferFile - * - * Concrete test vector for testing reading empty - * transcript file to G2 element, writing to output buffer - * and comparing. -TEST(Streaming, DISABLED_BoringReadG2ElemsFromBufferFile) -{ - // transcript file to tests hould actually exist - std::string transcript_path("./transcript/transcript00.dat"); - if (!streaming::is_file_exist(transcript_path)) { - LOG(ERROR) << "Transcript path: " << transcript_path << " should be \ - manually initialized with empty transcript file"; - } - - constexpr size_t buffer_size = sizeof(Fqe) * 2; - - libff::init_alt_bn128_params(); - - std::vector result; - std::vector elems; - - char buffer[buffer_size]; - char out_buffer[buffer_size]; - - // read to std::vec, convert to char * buffer with std::copy - auto _buffer = streaming::read_file_into_buffer(transcript_path); - LOG(TRACE) << "Input buffer size: " << _buffer.size(); - LOG(TRACE) << "Expected buffer size: " << buffer_size; - - std::copy(_buffer.begin(), _buffer.end(), buffer); - - // read contents from file to G1 element - streaming::read_g2_elements_from_buffer(elems, buffer, buffer_size); - elems[0].to_affine_coordinates(); - result.emplace_back(elems[0]); - - // write back to output buffer and compare - streaming::write_g2_elements_to_buffer(result, out_buffer); - ASSERT(memcmp(buffer, out_buffer, buffer_size)) - << "out_buffer contents: " << out_buffer; -} -*/ - - /* Streaming_ReadG2ElemsToBuffer * * Tests reading arbitrary buffer input to G2 elements diff --git a/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp b/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp index 3ae6531..ef61a9a 100644 --- a/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp +++ b/setup-tools/test/deepstate-tests/test_compute_range_polynomial.cpp @@ -30,17 +30,16 @@ TEST(Range, GeneratorPolynomial) libff::alt_bn128_pp::init_public_params(); - std::vector res1; - std::vector res2; + std::vector res1; + std::vector res2; // compute generator polynomial and store coefficients - std::vector>> subproduct_tree; + std::vector>> subproduct_tree; libfqfft::compute_subproduct_tree(log2(DEGREE), subproduct_tree); res1 = subproduct_tree[log2(DEGREE)][0]; // call generator function, and read stored coefficients from transcript file - generator::compute_generator_polynomial>(DEGREE); - streaming::read_field_elements_from_file(res2, "../setup_db/generator.dat", DEGREE); + res2 = generator::compute_generator_polynomial(DEGREE); // check each indiidual element, res2 may have larger buffer size for (size_t i = 0; i < res1.size(); i++) { @@ -63,19 +62,18 @@ TEST(Range, RangePolynomials) libff::alt_bn128_pp::init_public_params(); - // generate coeffs for generator polynomial - generator::compute_generator_polynomial>(DEGREE); - - // run setup to produce an initial transcript - run_setup("../setup_db", range_index, 1); - // produce output for memory mapping within compute_range_polynomials std::vector generator_polynomial; std::vector g1_x(DEGREE); std::vector g2_x(DEGREE); streaming::Manifest manifest; - streaming::read_field_elements_from_file(generator_polynomial, 
"../setup_db/generator.dat", DEGREE + 1); + // generate coeffs for generator polynomial + generator_polynomial = generator::compute_generator_polynomial>(DEGREE); + + // run setup to produce an initial transcript + run_setup("../setup_db", range_index, 1); + streaming::read_transcript(g1_x, g2_x, manifest, "../setup_db/transcript0_out.dat"); g1_x.insert(g1_x.begin(), G1::one());