From 4d7694e20c797817dba389a90c6fba6b0fa1182e Mon Sep 17 00:00:00 2001 From: Christopher Patton Date: Thu, 1 Aug 2024 13:41:36 -0700 Subject: [PATCH] poc: Move test vector generation out of unit tests Remove the TEST_VECTOR and TEST_VECTOR_PATH environment variables and move test vector generation logic into a new tool, `gen_test_vec.py`. The test vectors are complete with the following exceptions: 1. We currently don't have test vectors for Prio3MultihotCountVec. This VDAF is new as of draft 10, but we neglected to add test vectors for it. This change checks in the test vectors. 2. The test vector for IdpfBBCGGI21 was regenerated after this IDPF was renamed. Also, as of 01291c2c7c7568f8d31d01aa4d3e9df653d6b447, we are generating this with a proper nonce rather than an ad hoc binder. To confirm that nothing has changed, you can modify `gen_test_vec_for_idpf()` by replacing the line ``` nonce = gen_rand(idpf.NONCE_SIZE) ``` with ``` nonce = bytes([0x73,0x6f,0x6d,0x65,0x20,0x6e,0x6f,0x6e,0x63,0x65]) ``` Also, this change opts `gen_test_vec.py` and `plot_prio3_multiproof_robustness.py` into linting and type checking. 
--- .github/workflows/lint-python.yml | 6 +- .github/workflows/test.yml | 2 +- poc/README.md | 10 +- poc/gen_test_vec.py | 394 +++++++++++++++++++++++ poc/tests/idpf_util.py | 36 +-- poc/tests/test_idpf_bbcggi21.py | 6 +- poc/tests/test_vdaf_poplar1.py | 5 +- poc/tests/test_vdaf_prio3.py | 27 +- poc/tests/test_xof.py | 46 +-- poc/vdaf_poc/common.py | 12 - poc/vdaf_poc/vdaf.py | 167 +--------- test_vec/08/IdpfBBCGGI21_0.json | 52 +++ test_vec/08/IdpfPoplar_0.json | 52 --- test_vec/08/Prio3MultihotCountVec_0.json | 58 ++++ 14 files changed, 530 insertions(+), 343 deletions(-) create mode 100644 poc/gen_test_vec.py create mode 100644 test_vec/08/IdpfBBCGGI21_0.json delete mode 100644 test_vec/08/IdpfPoplar_0.json create mode 100644 test_vec/08/Prio3MultihotCountVec_0.json diff --git a/.github/workflows/lint-python.yml b/.github/workflows/lint-python.yml index 6d20cf21..6c1eae10 100644 --- a/.github/workflows/lint-python.yml +++ b/.github/workflows/lint-python.yml @@ -25,11 +25,11 @@ jobs: - name: Run pyflakes working-directory: poc - run: pyflakes vdaf_poc/*.py tests/*.py + run: pyflakes *.py vdaf_poc/*.py tests/*.py - name: Run autopep8 working-directory: poc - run: autopep8 --diff --exit-code vdaf_poc/*.py tests/*.py + run: autopep8 --diff --exit-code *.py vdaf_poc/*.py tests/*.py - name: Run isort working-directory: poc @@ -37,4 +37,4 @@ jobs: - name: Run pylint working-directory: poc - run: pylint --disable=all --enable=redefined-outer-name vdaf_poc/*.py tests/*.py + run: pylint --disable=all --enable=redefined-outer-name *.py vdaf_poc/*.py tests/*.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0321c67d..98ee3e8e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -32,4 +32,4 @@ jobs: - name: Enforce type hints working-directory: poc - run: sage -python -m mypy vdaf_poc/*.py tests/*.py + run: sage -python -m mypy *.py vdaf_poc/*.py tests/*.py diff --git a/poc/README.md b/poc/README.md index afeedfbf..7a845e67 
100644 --- a/poc/README.md +++ b/poc/README.md @@ -38,17 +38,15 @@ sage -python -m unittest ## Generating test vectors -To generate test vectors, set environment variable `TEST_VECTOR` to -be `TRUE` when running tests: +To generate test vectors, run: ``` -TEST_VECTOR=TRUE sage -python -m unittest --quiet +sage -python gen_test_vec.py ``` Users can also specify a custom path to generate the test vectors in -environment variable `TEST_VECTOR_PATH`. For example, to generate -test vectors for Prio3 VDAFs into path `test_vec/00`: +environment variable `TEST_VECTOR_PATH`: ``` -TEST_VECTOR=TRUE TEST_VECTOR_PATH=test_vec/00 sage -python -m unittest --quiet tests/test_vdaf_prio3.py +TEST_VECTOR_PATH=path/to/test_vec sage -python gen_test_vec.py ``` diff --git a/poc/gen_test_vec.py b/poc/gen_test_vec.py new file mode 100644 index 00000000..781884fa --- /dev/null +++ b/poc/gen_test_vec.py @@ -0,0 +1,394 @@ +import json +import os +from typing import Any, Generic, Optional, TypedDict, TypeVar, cast + +from vdaf_poc.common import VERSION, print_wrapped_line, to_le_bytes +from vdaf_poc.field import Field128 +from vdaf_poc.idpf import Idpf +from vdaf_poc.vdaf import Vdaf +from vdaf_poc.xof import Xof + +Measurement = TypeVar("Measurement") +AggParam = TypeVar("AggParam") +PublicShare = TypeVar("PublicShare") +InputShare = TypeVar("InputShare") +OutShare = TypeVar("OutShare") +AggShare = TypeVar("AggShare") +AggResult = TypeVar("AggResult") +PrepState = TypeVar("PrepState") +PrepShare = TypeVar("PrepShare") +PrepMessage = TypeVar("PrepMessage") + +# The path where test vectors are generated. +TEST_VECTOR_PATH = os.environ.get('TEST_VECTOR_PATH', + '../test_vec/{:02}'.format(VERSION)) + + +def gen_rand(length: int) -> bytes: + """ + A dummy source of randomness intended for creating reproducible test vectors. 
+ """ + out = [] + for i in range(length): + out.append(i % 256) + return bytes(out) + + +# VDAF + +class VdafPrepTestVectorDict(Generic[Measurement], TypedDict): + measurement: Measurement + nonce: str + input_shares: list[str] + prep_shares: list[list[str]] + prep_messages: list[str] + out_shares: list[list[str]] + rand: str + public_share: str + + +class VdafTestVectorDict(Generic[Measurement, AggParam, AggResult], TypedDict): + shares: int + verify_key: str + agg_param: AggParam + prep: list[VdafPrepTestVectorDict[Measurement]] + agg_shares: list[str] + agg_result: Optional[AggResult] + + +def gen_test_vec_for_vdaf( + vdaf: Vdaf[ + Measurement, + AggParam, + PublicShare, + InputShare, + list[Any], # OutShare + AggShare, + AggResult, + PrepState, + PrepShare, + PrepMessage, + ], + agg_param: AggParam, + measurements: list[Measurement], + test_vec_instance: int, + print_test_vec: bool = True) -> AggResult: + """ + Generate test vectors for a VDAF. + """ + + nonces = [gen_rand(vdaf.NONCE_SIZE) for _ in range(len(measurements))] + verify_key = gen_rand(vdaf.VERIFY_KEY_SIZE) + + test_vec: VdafTestVectorDict[Measurement, AggParam, AggResult] = { + 'shares': vdaf.SHARES, + 'verify_key': verify_key.hex(), + 'agg_param': agg_param, + 'prep': [], + 'agg_shares': [], + 'agg_result': None, # set below + } + type_params = vdaf.test_vec_set_type_param( + cast(dict[str, Any], test_vec) + ) + + out_shares = [] + for (nonce, measurement) in zip(nonces, measurements): + assert len(nonce) == vdaf.NONCE_SIZE + + # Each Client shards its measurement into input shares. 
+ rand = gen_rand(vdaf.RAND_SIZE) + (public_share, input_shares) = \ + vdaf.shard(measurement, nonce, rand) + + prep_test_vec: VdafPrepTestVectorDict[Measurement] = { + 'measurement': measurement, + 'nonce': nonce.hex(), + 'input_shares': [], + 'prep_shares': [[] for _ in range(vdaf.ROUNDS)], + 'prep_messages': [], + 'out_shares': [], + 'rand': rand.hex(), + 'public_share': vdaf.test_vec_encode_public_share( + public_share + ).hex() + } + for input_share in input_shares: + prep_test_vec['input_shares'].append( + vdaf.test_vec_encode_input_share(input_share).hex()) + + # Each Aggregator initializes its preparation state. + prep_states = [] + outbound_prep_shares = [] + for j in range(vdaf.SHARES): + (state, share) = vdaf.prep_init(verify_key, j, + agg_param, + nonce, + public_share, + input_shares[j]) + prep_states.append(state) + outbound_prep_shares.append(share) + + for prep_share in outbound_prep_shares: + prep_test_vec['prep_shares'][0].append( + vdaf.test_vec_encode_prep_share(prep_share).hex()) + + # Aggregators recover their output shares. + for i in range(vdaf.ROUNDS - 1): + prep_msg = vdaf.prep_shares_to_prep(agg_param, + outbound_prep_shares) + prep_test_vec['prep_messages'].append( + vdaf.test_vec_encode_prep_msg(prep_msg).hex()) + + outbound_prep_shares = [] + for j in range(vdaf.SHARES): + out = vdaf.prep_next(prep_states[j], prep_msg) + assert isinstance(out, tuple) + (prep_states[j], prep_share) = out + outbound_prep_shares.append(prep_share) + # REMOVE ME + for prep_share in outbound_prep_shares: + prep_test_vec['prep_shares'][i+1].append( + vdaf.test_vec_encode_prep_share(prep_share).hex() + ) + + # The final outputs of the prepare phase are the output + # shares. 
+ prep_msg = vdaf.prep_shares_to_prep(agg_param, + outbound_prep_shares) + prep_test_vec['prep_messages'].append( + vdaf.test_vec_encode_prep_msg(prep_msg).hex()) + + outbound_out_shares = [] + for j in range(vdaf.SHARES): + out_share = vdaf.prep_next(prep_states[j], prep_msg) + assert not isinstance(out_share, tuple) + outbound_out_shares.append(out_share) + + for out_share in outbound_out_shares: + prep_test_vec['out_shares'].append([ + to_le_bytes(x.as_unsigned(), x.ENCODED_SIZE).hex() + for x in out_share + ]) + test_vec['prep'].append(prep_test_vec) + + out_shares.append(outbound_out_shares) + + # Each Aggregator aggregates its output shares into an + # aggregate share. In a distributed VDAF computation, the + # aggregate shares are sent over the network. + agg_shares = [] + for j in range(vdaf.SHARES): + out_shares_j = [out[j] for out in out_shares] + agg_share_j = vdaf.aggregate(agg_param, out_shares_j) + agg_shares.append(agg_share_j) + # REMOVE ME + test_vec['agg_shares'].append( + vdaf.test_vec_encode_agg_share(agg_share_j).hex()) + + # Collector unshards the aggregate. 
+ num_measurements = len(measurements) + agg_result = vdaf.unshard(agg_param, agg_shares, + num_measurements) + test_vec['agg_result'] = agg_result + if print_test_vec: + pretty_print_vdaf_test_vec(vdaf, test_vec, type_params) + + os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) + filename = '{}/{}_{}.json'.format( + TEST_VECTOR_PATH, + vdaf.test_vec_name, + test_vec_instance, + ) + with open(filename, 'w', encoding="UTF-8") as f: + json.dump(test_vec, f, indent=4, sort_keys=True) + f.write('\n') + + return agg_result + + +def pretty_print_vdaf_test_vec( + vdaf: Vdaf[ + Measurement, AggParam, Any, Any, Any, Any, AggResult, Any, Any, Any + ], + typed_test_vec: VdafTestVectorDict[Measurement, AggParam, AggResult], + type_params: list[str]) -> None: + test_vec = cast(dict[str, Any], typed_test_vec) + print('---------- {} ---------------'.format(vdaf.test_vec_name)) + for type_param in type_params: + print('{}: {}'.format(type_param, test_vec[type_param])) + print('verify_key: "{}"'.format(test_vec['verify_key'])) + if test_vec['agg_param'] is not None: + print('agg_param: {}'.format(test_vec['agg_param'])) + + for (n, prep_test_vec) in enumerate(test_vec['prep']): + print('upload_{}:'.format(n)) + print(' measurement: {}'.format(prep_test_vec['measurement'])) + print(' nonce: "{}"'.format(prep_test_vec['nonce'])) + print(' public_share: >-') + print_wrapped_line(prep_test_vec['public_share'], tab=4) + + # Shard + for (i, input_share) in enumerate(prep_test_vec['input_shares']): + print(' input_share_{}: >-'.format(i)) + print_wrapped_line(input_share, tab=4) + + # Prepare + for (i, (prep_shares, prep_msg)) in enumerate( + zip(prep_test_vec['prep_shares'], + prep_test_vec['prep_messages'])): + print(' round_{}:'.format(i)) + for (j, prep_share) in enumerate(prep_shares): + print(' prep_share_{}: >-'.format(j)) + print_wrapped_line(prep_share, tab=6) + print(' prep_message: >-') + print_wrapped_line(prep_msg, tab=6) + + for (j, out_shares) in 
enumerate(prep_test_vec['out_shares']): + print(' out_share_{}:'.format(j)) + for out_share in out_shares: + print(' - {}'.format(out_share)) + + # Aggregate + for (j, agg_share) in enumerate(test_vec['agg_shares']): + print('agg_share_{}: >-'.format(j)) + print_wrapped_line(agg_share, tab=2) + + # Unshard + print('agg_result: {}'.format(test_vec['agg_result'])) + print() + + +# IDPF + +def gen_test_vec_for_idpf(idpf: Idpf, + alpha: int, + test_vec_instance: int) -> None: + beta_inner = [] + for level in range(idpf.BITS - 1): + beta_inner.append([idpf.field_inner(level)] * idpf.VALUE_LEN) + beta_leaf = [idpf.field_leaf(idpf.BITS - 1)] * idpf.VALUE_LEN + rand = gen_rand(idpf.RAND_SIZE) + nonce = gen_rand(idpf.NONCE_SIZE) + (public_share, keys) = idpf.gen(alpha, beta_inner, beta_leaf, nonce, rand) + + printable_beta_inner = [ + [str(elem.as_unsigned()) for elem in value] for value in beta_inner + ] + printable_beta_leaf = [str(elem.as_unsigned()) for elem in beta_leaf] + printable_keys = [key.hex() for key in keys] + test_vec = { + 'bits': int(idpf.BITS), + 'alpha': str(alpha), + 'beta_inner': printable_beta_inner, + 'beta_leaf': printable_beta_leaf, + 'nonce': nonce.hex(), + 'public_share': public_share.hex(), + 'keys': printable_keys, + } + + os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) + filename = '{}/{}_{}.json'.format(TEST_VECTOR_PATH, idpf.test_vec_name, + test_vec_instance) + with open(filename, 'w') as f: + json.dump(test_vec, f, indent=4, sort_keys=True) + f.write('\n') + + +# XOF + +def gen_test_vec_for_xof(cls: type[Xof]) -> None: + seed = gen_rand(cls.SEED_SIZE) + dst = b'domain separation tag' + binder = b'binder string' + length = 40 + + test_vector = { + 'seed': seed.hex(), + 'dst': dst.hex(), + 'binder': binder.hex(), + 'length': length, + 'derived_seed': None, # set below + 'expanded_vec_field128': None, # set below + } + + derived_seed = cls.derive_seed(seed, dst, binder).hex() + expanded_vec_field128 = Field128.encode_vec( + 
cls.expand_into_vec(Field128, seed, dst, binder, length)).hex() + test_vector['derived_seed'] = derived_seed + test_vector['expanded_vec_field128'] = expanded_vec_field128 + + print('{}:'.format(cls.test_vec_name)) + print(' seed: "{}"'.format(test_vector['seed'])) + print(' dst: "{}"'.format(test_vector['dst'])) + print(' binder: "{}"'.format(test_vector['binder'])) + print(' length: {}'.format(test_vector['length'])) + print(' derived_seed: "{}"'.format(test_vector['derived_seed'])) + print(' expanded_vec_field128: >-') + print_wrapped_line(expanded_vec_field128, tab=4) + + os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) + with open('{}/{}.json'.format( + TEST_VECTOR_PATH, cls.__name__), 'w') as f: + json.dump(test_vector, f, indent=4, sort_keys=True) + f.write('\n') + + +if __name__ == '__main__': + from vdaf_poc import idpf_bbcggi21, vdaf_poplar1, vdaf_prio3, xof + + # Prio3 variants + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Count(2), None, [1], 0) + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Count(3), None, [1], 1) + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Sum(2, 8), None, [100], 0) + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Sum(3, 8), None, [100], 1) + gen_test_vec_for_vdaf( + vdaf_prio3.Prio3SumVec(2, 10, 8, 9), + None, + [ + list(range(10)), + [1] * 10, + [255] * 10 + ], + 0, + ) + gen_test_vec_for_vdaf( + vdaf_prio3.Prio3SumVec(3, 3, 16, 7), + None, + [ + [10000, 32000, 9], + [19342, 19615, 3061], + [15986, 24671, 23910] + ], + 1, + ) + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Histogram(2, 4, 2), None, [2], 0) + gen_test_vec_for_vdaf(vdaf_prio3.Prio3Histogram(3, 11, 3), None, [2], 1) + gen_test_vec_for_vdaf( + vdaf_prio3.Prio3MultihotCountVec(2, 4, 2, 2), + None, + [[0, 1, 1, 0]], + 0, + ) + + # Poplar1 + tests = [ + (0, (0, 1)), + (1, (0, 1, 2, 3)), + (2, (0, 2, 4, 6)), + (3, (1, 3, 5, 7, 9, 13, 15)), + ] + for (test_level, prefixes) in tests: + gen_test_vec_for_vdaf( + vdaf_poplar1.Poplar1(4), + (test_level, prefixes), + [0b1101], + test_level, + ) + + # IdpfBBCGGI21 
+ gen_test_vec_for_idpf(idpf_bbcggi21.IdpfBBCGGI21(2, 10), 0, 0) + + # XOFs + gen_test_vec_for_xof(xof.XofTurboShake128) + gen_test_vec_for_xof(xof.XofFixedKeyAes128) diff --git a/poc/tests/idpf_util.py b/poc/tests/idpf_util.py index 672d9361..9a0e3fd8 100644 --- a/poc/tests/idpf_util.py +++ b/poc/tests/idpf_util.py @@ -1,9 +1,7 @@ -import json -import os from functools import reduce from typing import Sequence -from vdaf_poc.common import TEST_VECTOR_PATH, gen_rand, vec_add +from vdaf_poc.common import gen_rand, vec_add from vdaf_poc.idpf import Idpf @@ -81,35 +79,3 @@ def test_idpf_exhaustive(idpf: Idpf, alpha: int) -> None: if got != want: print('error: {0:b} {1:b} {2}: got {3}; want {4}'.format( alpha, prefix, level, got, want)) - - -def gen_test_vec(idpf: Idpf, alpha: int, test_vec_instance: int) -> None: - beta_inner = [] - for level in range(idpf.BITS - 1): - beta_inner.append([idpf.field_inner(level)] * idpf.VALUE_LEN) - beta_leaf = [idpf.field_leaf(idpf.BITS - 1)] * idpf.VALUE_LEN - rand = gen_rand(idpf.RAND_SIZE) - nonce = gen_rand(idpf.NONCE_SIZE) - (public_share, keys) = idpf.gen(alpha, beta_inner, beta_leaf, nonce, rand) - - printable_beta_inner = [ - [str(elem.as_unsigned()) for elem in value] for value in beta_inner - ] - printable_beta_leaf = [str(elem.as_unsigned()) for elem in beta_leaf] - printable_keys = [key.hex() for key in keys] - test_vec = { - 'bits': int(idpf.BITS), - 'alpha': str(alpha), - 'beta_inner': printable_beta_inner, - 'beta_leaf': printable_beta_leaf, - 'nonce': nonce.hex(), - 'public_share': public_share.hex(), - 'keys': printable_keys, - } - - os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) - filename = '{}/{}_{}.json'.format(TEST_VECTOR_PATH, idpf.test_vec_name, - test_vec_instance) - with open(filename, 'w') as f: - json.dump(test_vec, f, indent=4, sort_keys=True) - f.write('\n') diff --git a/poc/tests/test_idpf_bbcggi21.py b/poc/tests/test_idpf_bbcggi21.py index ac976a7c..f3f6d48f 100644 --- 
a/poc/tests/test_idpf_bbcggi21.py +++ b/poc/tests/test_idpf_bbcggi21.py @@ -1,16 +1,14 @@ import unittest from typing import cast -from tests.idpf_util import gen_test_vec, test_idpf, test_idpf_exhaustive -from vdaf_poc.common import TEST_VECTOR, from_be_bytes, gen_rand, vec_add +from tests.idpf_util import test_idpf, test_idpf_exhaustive +from vdaf_poc.common import from_be_bytes, gen_rand, vec_add from vdaf_poc.field import Field from vdaf_poc.idpf_bbcggi21 import IdpfBBCGGI21 class TestIdpfBBCGGI21(unittest.TestCase): def test(self) -> None: - if TEST_VECTOR: - gen_test_vec(IdpfBBCGGI21(2, 10), 0, 0) test_idpf( IdpfBBCGGI21(2, 16), 0b1111000011110000, diff --git a/poc/tests/test_vdaf_poplar1.py b/poc/tests/test_vdaf_poplar1.py index 6abcaa49..c64e55ad 100644 --- a/poc/tests/test_vdaf_poplar1.py +++ b/poc/tests/test_vdaf_poplar1.py @@ -1,6 +1,6 @@ import unittest -from vdaf_poc.common import TEST_VECTOR, from_be_bytes +from vdaf_poc.common import from_be_bytes from vdaf_poc.vdaf import test_vdaf from vdaf_poc.vdaf_poplar1 import Poplar1, get_ancestor @@ -131,5 +131,4 @@ def test_generate_test_vectors(self) -> None: ] for (level, prefixes, expected_result) in tests: agg_param = (int(level), tuple(map(int, prefixes))) - test_vdaf(cls, agg_param, measurements, expected_result, - print_test_vec=TEST_VECTOR, test_vec_instance=level) + test_vdaf(cls, agg_param, measurements, expected_result) diff --git a/poc/tests/test_vdaf_prio3.py b/poc/tests/test_vdaf_prio3.py index 3a4488f2..a29bd283 100644 --- a/poc/tests/test_vdaf_prio3.py +++ b/poc/tests/test_vdaf_prio3.py @@ -3,7 +3,6 @@ from tests.test_flp import FlpTest from tests.test_flp_bbcggi19 import TestAverage -from vdaf_poc.common import TEST_VECTOR from vdaf_poc.field import FftField, Field64, Field128 from vdaf_poc.flp_bbcggi19 import FlpBBCGGI19 from vdaf_poc.vdaf import test_vdaf @@ -53,7 +52,6 @@ def test_prio3sumvec(num_proofs: int, field: type[F]) -> None: [255] * 10 ], list(range(256, 266)), - 
print_test_vec=False, ) prio3 = Prio3SumVec(3, 3, 16, 7) @@ -66,8 +64,6 @@ def test_prio3sumvec(num_proofs: int, field: type[F]) -> None: [15986, 24671, 23910] ], [45328, 76286, 26980], - print_test_vec=False, - test_vec_instance=1, ) @@ -95,23 +91,21 @@ def test_count(self) -> None: prio3 = Prio3Count(2) assert prio3.ID == 0x00000000 test_vdaf(prio3, None, [0, 1, 1, 0, 1], 3) - test_vdaf(prio3, None, [1], 1, print_test_vec=TEST_VECTOR) + test_vdaf(prio3, None, [1], 1) def test_count_3_shares(self) -> None: prio3 = Prio3Count(3) - test_vdaf(prio3, None, [1], 1, print_test_vec=TEST_VECTOR, - test_vec_instance=1) + test_vdaf(prio3, None, [1], 1) def test_sum(self) -> None: prio3 = Prio3Sum(2, 8) assert prio3.ID == 0x00000001 test_vdaf(prio3, None, [0, 147, 1, 0, 11, 0], 159) - test_vdaf(prio3, None, [100], 100, print_test_vec=TEST_VECTOR) + test_vdaf(prio3, None, [100], 100) def test_sum_3_shares(self) -> None: prio3 = Prio3Sum(3, 8) - test_vdaf(prio3, None, [100], 100, print_test_vec=TEST_VECTOR, - test_vec_instance=1) + test_vdaf(prio3, None, [100], 100) def test_sum_vec(self) -> None: prio3 = Prio3SumVec(2, 10, 8, 9) @@ -131,7 +125,6 @@ def test_sum_vec(self) -> None: [255] * 10 ], list(range(256, 266)), - print_test_vec=TEST_VECTOR, ) def test_sum_vec_3_shares(self) -> None: @@ -145,8 +138,6 @@ def test_sum_vec_3_shares(self) -> None: [15986, 24671, 23910] ], [45328, 76286, 26980], - print_test_vec=TEST_VECTOR, - test_vec_instance=1, ) def test_histogram(self) -> None: @@ -157,15 +148,13 @@ def test_histogram(self) -> None: test_vdaf(prio3, None, [2], [0, 0, 1, 0]) test_vdaf(prio3, None, [3], [0, 0, 0, 1]) test_vdaf(prio3, None, [0, 0, 1, 1, 2, 2, 3, 3], [2, 2, 2, 2]) - test_vdaf(prio3, None, [2], [0, 0, 1, 0], print_test_vec=TEST_VECTOR) + test_vdaf(prio3, None, [2], [0, 0, 1, 0]) prio3 = Prio3Histogram(3, 11, 3) test_vdaf( prio3, None, [2], [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], - print_test_vec=TEST_VECTOR, - test_vec_instance=1, ) def 
test_multihot_count_vec(self) -> None: @@ -177,9 +166,7 @@ def test_multihot_count_vec(self) -> None: test_vdaf(prio3, None, [[0, 1, 0, 0]], [0, 1, 0, 0]) test_vdaf(prio3, None, [[0, 1, 1, 0]], [0, 1, 1, 0]) test_vdaf(prio3, None, [[0, 1, 1, 0], [0, 1, 0, 1]], [0, 2, 1, 1]) - test_vdaf( - prio3, None, [[0, 1, 1, 0]], [0, 1, 1, 0], print_test_vec=TEST_VECTOR - ) + test_vdaf(prio3, None, [[0, 1, 1, 0]], [0, 1, 1, 0]) def test_multi_hot_histogram_3_shares(self) -> None: # Prio3MultihotCountVec with length = 11, max_weight = 5, @@ -190,8 +177,6 @@ def test_multi_hot_histogram_3_shares(self) -> None: None, [[1] * 5 + [0] * 6], [1] * 5 + [0] * 6, - print_test_vec=False, - test_vec_instance=1, ) def test_average(self) -> None: diff --git a/poc/tests/test_xof.py b/poc/tests/test_xof.py index ce9974b5..da711b69 100644 --- a/poc/tests/test_xof.py +++ b/poc/tests/test_xof.py @@ -1,9 +1,6 @@ -import json -import os import unittest -from vdaf_poc.common import (TEST_VECTOR, TEST_VECTOR_PATH, format_dst, - gen_rand, print_wrapped_line) +from vdaf_poc.common import format_dst, gen_rand from vdaf_poc.field import Field, Field64, Field128 from vdaf_poc.xof import Xof, XofFixedKeyAes128, XofTurboShake128 @@ -33,43 +30,6 @@ def test_xof(cls: type[Xof], field: type[Field], expanded_len: int) -> None: assert len(expanded_vec) == expanded_len -def generate_test_vector(cls: type[Xof]) -> None: - seed = gen_rand(cls.SEED_SIZE) - dst = b'domain separation tag' - binder = b'binder string' - length = 40 - - test_vector = { - 'seed': seed.hex(), - 'dst': dst.hex(), - 'binder': binder.hex(), - 'length': length, - 'derived_seed': None, # set below - 'expanded_vec_field128': None, # set below - } - - derived_seed = cls.derive_seed(seed, dst, binder).hex() - expanded_vec_field128 = Field128.encode_vec( - cls.expand_into_vec(Field128, seed, dst, binder, length)).hex() - test_vector['derived_seed'] = derived_seed - test_vector['expanded_vec_field128'] = expanded_vec_field128 - - 
print('{}:'.format(cls.test_vec_name)) - print(' seed: "{}"'.format(test_vector['seed'])) - print(' dst: "{}"'.format(test_vector['dst'])) - print(' binder: "{}"'.format(test_vector['binder'])) - print(' length: {}'.format(test_vector['length'])) - print(' derived_seed: "{}"'.format(test_vector['derived_seed'])) - print(' expanded_vec_field128: >-') - print_wrapped_line(expanded_vec_field128, tab=4) - - os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) - with open('{}/{}.json'.format( - TEST_VECTOR_PATH, cls.__name__), 'w') as f: - json.dump(test_vector, f, indent=4, sort_keys=True) - f.write('\n') - - class TestXof(unittest.TestCase): def test_rejection_sampling(self) -> None: # This test case was found through brute-force search using this tool: @@ -86,10 +46,6 @@ def test_rejection_sampling(self) -> None: def test_turboshake128(self) -> None: test_xof(XofTurboShake128, Field128, 23) - if TEST_VECTOR: - generate_test_vector(XofTurboShake128) def test_fixedkeyaes128(self) -> None: test_xof(XofFixedKeyAes128, Field128, 23) - if TEST_VECTOR: - generate_test_vector(XofFixedKeyAes128) diff --git a/poc/vdaf_poc/common.py b/poc/vdaf_poc/common.py index 38897059..94586db3 100644 --- a/poc/vdaf_poc/common.py +++ b/poc/vdaf_poc/common.py @@ -6,13 +6,6 @@ # Document version, reved with each draft that contains breaking changes. VERSION = 8 -# If set, then test vectors will be generated. A fixed source of randomness is -# used for `gen_rand()`. -TEST_VECTOR = os.environ.get('TEST_VECTOR', 'false').lower() == 'true' -# The path where test vectors are generated. 
-TEST_VECTOR_PATH = os.environ.get('TEST_VECTOR_PATH', - '../test_vec/{:02}'.format(VERSION)) - class FieldProtocol(Protocol): def __add__(self, other: Self) -> Self: @@ -43,11 +36,6 @@ def zeros(length: int) -> bytes: def gen_rand(length: int) -> bytes: """Return the requested number of random bytes.""" - if TEST_VECTOR: - out = [] - for i in range(length): - out.append(i % 256) - return bytes(out) return os.urandom(length) diff --git a/poc/vdaf_poc/vdaf.py b/poc/vdaf_poc/vdaf.py index f07d7069..ca58e499 100644 --- a/poc/vdaf_poc/vdaf.py +++ b/poc/vdaf_poc/vdaf.py @@ -1,12 +1,9 @@ """Definition of VDAFs.""" -import json -import os from abc import ABCMeta, abstractmethod -from typing import Any, Generic, Optional, TypedDict, TypeVar, Union, cast +from typing import Any, Generic, TypeVar, Union -from vdaf_poc.common import (TEST_VECTOR_PATH, format_dst, gen_rand, - print_wrapped_line, to_le_bytes) +from vdaf_poc.common import format_dst, gen_rand from vdaf_poc.field import Field Measurement = TypeVar("Measurement") @@ -19,6 +16,7 @@ PrepState = TypeVar("PrepState") PrepShare = TypeVar("PrepShare") PrepMessage = TypeVar("PrepMessage") +F = TypeVar("F", bound=Field) class Vdaf( @@ -209,29 +207,6 @@ def test_vec_encode_prep_msg(self, prep_message: PrepMessage) -> bytes: pass -class PrepTestVectorDict(Generic[Measurement], TypedDict): - measurement: Measurement - nonce: str - input_shares: list[str] - prep_shares: list[list[str]] - prep_messages: list[str] - out_shares: list[list[str]] - rand: str - public_share: str - - -class TestVectorDict(Generic[Measurement, AggParam, AggResult], TypedDict): - shares: int - verify_key: str - agg_param: AggParam - prep: list[PrepTestVectorDict[Measurement]] - agg_shares: list[str] - agg_result: Optional[AggResult] - - -F = TypeVar("F", bound=Field) - - # NOTE: This function is excerpted in the document, as the figure # {{run-vdaf}}. Its width should be limited to 69 columns to avoid # warnings from xml2rfc. 
@@ -252,9 +227,7 @@ def run_vdaf( verify_key: bytes, agg_param: AggParam, nonces: list[bytes], - measurements: list[Measurement], - print_test_vec: bool = False, - test_vec_instance: int = 0) -> AggResult: + measurements: list[Measurement]) -> AggResult: """ Run the VDAF on a list of measurements. @@ -274,19 +247,6 @@ def run_vdaf( "measurements and nonces lists have different lengths" ) - # REMOVE ME - test_vec: TestVectorDict[Measurement, AggParam, AggResult] = { - 'shares': vdaf.SHARES, - 'verify_key': verify_key.hex(), - 'agg_param': agg_param, - 'prep': [], - 'agg_shares': [], - 'agg_result': None, # set below - } - type_params = vdaf.test_vec_set_type_param( - cast(dict[str, Any], test_vec) - ) - out_shares = [] for (nonce, measurement) in zip(nonces, measurements): assert len(nonce) == vdaf.NONCE_SIZE @@ -296,23 +256,6 @@ def run_vdaf( (public_share, input_shares) = \ vdaf.shard(measurement, nonce, rand) - # REMOVE ME - prep_test_vec: PrepTestVectorDict[Measurement] = { - 'measurement': measurement, - 'nonce': nonce.hex(), - 'input_shares': [], - 'prep_shares': [[] for _ in range(vdaf.ROUNDS)], - 'prep_messages': [], - 'out_shares': [], - 'rand': rand.hex(), - 'public_share': vdaf.test_vec_encode_public_share( - public_share - ).hex() - } - for input_share in input_shares: - prep_test_vec['input_shares'].append( - vdaf.test_vec_encode_input_share(input_share).hex()) - # Each Aggregator initializes its preparation state. prep_states = [] outbound_prep_shares = [] @@ -325,18 +268,10 @@ def run_vdaf( prep_states.append(state) outbound_prep_shares.append(share) - # REMOVE ME - for prep_share in outbound_prep_shares: - prep_test_vec['prep_shares'][0].append( - vdaf.test_vec_encode_prep_share(prep_share).hex()) - # Aggregators recover their output shares. 
for i in range(vdaf.ROUNDS - 1): prep_msg = vdaf.prep_shares_to_prep(agg_param, outbound_prep_shares) - # REMOVE ME - prep_test_vec['prep_messages'].append( - vdaf.test_vec_encode_prep_msg(prep_msg).hex()) outbound_prep_shares = [] for j in range(vdaf.SHARES): @@ -344,19 +279,11 @@ def run_vdaf( assert isinstance(out, tuple) (prep_states[j], prep_share) = out outbound_prep_shares.append(prep_share) - # REMOVE ME - for prep_share in outbound_prep_shares: - prep_test_vec['prep_shares'][i+1].append( - vdaf.test_vec_encode_prep_share(prep_share).hex() - ) # The final outputs of the prepare phase are the output # shares. prep_msg = vdaf.prep_shares_to_prep(agg_param, outbound_prep_shares) - # REMOVE ME - prep_test_vec['prep_messages'].append( - vdaf.test_vec_encode_prep_msg(prep_msg).hex()) outbound_out_shares = [] for j in range(vdaf.SHARES): @@ -364,14 +291,6 @@ def run_vdaf( assert not isinstance(out_share, tuple) outbound_out_shares.append(out_share) - # REMOVE ME - for out_share in outbound_out_shares: - prep_test_vec['out_shares'].append([ - to_le_bytes(x.as_unsigned(), x.ENCODED_SIZE).hex() - for x in out_share - ]) - test_vec['prep'].append(prep_test_vec) - out_shares.append(outbound_out_shares) # Each Aggregator aggregates its output shares into an @@ -382,84 +301,14 @@ def run_vdaf( out_shares_j = [out[j] for out in out_shares] agg_share_j = vdaf.aggregate(agg_param, out_shares_j) agg_shares.append(agg_share_j) - # REMOVE ME - test_vec['agg_shares'].append( - vdaf.test_vec_encode_agg_share(agg_share_j).hex()) # Collector unshards the aggregate. 
num_measurements = len(measurements) agg_result = vdaf.unshard(agg_param, agg_shares, num_measurements) - # REMOVE ME - test_vec['agg_result'] = agg_result - if print_test_vec: - pretty_print_vdaf_test_vec(vdaf, test_vec, type_params) - - os.system('mkdir -p {}'.format(TEST_VECTOR_PATH)) - filename = '{}/{}_{}.json'.format( - TEST_VECTOR_PATH, - vdaf.test_vec_name, - test_vec_instance, - ) - with open(filename, 'w', encoding="UTF-8") as f: - json.dump(test_vec, f, indent=4, sort_keys=True) - f.write('\n') - return agg_result -def pretty_print_vdaf_test_vec( - vdaf: Vdaf[ - Measurement, AggParam, Any, Any, Any, Any, AggResult, Any, Any, Any - ], - typed_test_vec: TestVectorDict[Measurement, AggParam, AggResult], - type_params: list[str]) -> None: - test_vec = cast(dict[str, Any], typed_test_vec) - print('---------- {} ---------------'.format(vdaf.test_vec_name)) - for type_param in type_params: - print('{}: {}'.format(type_param, test_vec[type_param])) - print('verify_key: "{}"'.format(test_vec['verify_key'])) - if test_vec['agg_param'] is not None: - print('agg_param: {}'.format(test_vec['agg_param'])) - - for (n, prep_test_vec) in enumerate(test_vec['prep']): - print('upload_{}:'.format(n)) - print(' measurement: {}'.format(prep_test_vec['measurement'])) - print(' nonce: "{}"'.format(prep_test_vec['nonce'])) - print(' public_share: >-') - print_wrapped_line(prep_test_vec['public_share'], tab=4) - - # Shard - for (i, input_share) in enumerate(prep_test_vec['input_shares']): - print(' input_share_{}: >-'.format(i)) - print_wrapped_line(input_share, tab=4) - - # Prepare - for (i, (prep_shares, prep_msg)) in enumerate( - zip(prep_test_vec['prep_shares'], - prep_test_vec['prep_messages'])): - print(' round_{}:'.format(i)) - for (j, prep_share) in enumerate(prep_shares): - print(' prep_share_{}: >-'.format(j)) - print_wrapped_line(prep_share, tab=6) - print(' prep_message: >-') - print_wrapped_line(prep_msg, tab=6) - - for (j, out_shares) in 
enumerate(prep_test_vec['out_shares']): - print(' out_share_{}:'.format(j)) - for out_share in out_shares: - print(' - {}'.format(out_share)) - - # Aggregate - for (j, agg_share) in enumerate(test_vec['agg_shares']): - print('agg_share_{}: >-'.format(j)) - print_wrapped_line(agg_share, tab=2) - - # Unshard - print('agg_result: {}'.format(test_vec['agg_result'])) - print() - - def test_vdaf( vdaf: Vdaf[ Measurement, @@ -475,9 +324,7 @@ def test_vdaf( ], agg_param: AggParam, measurements: list[Measurement], - expected_agg_result: AggResult, - print_test_vec: bool = False, - test_vec_instance: int = 0) -> None: + expected_agg_result: AggResult) -> None: # Test that the algorithm identifier is in the correct range. assert 0 <= vdaf.ID and vdaf.ID < 2 ** 32 @@ -488,9 +335,7 @@ def test_vdaf( verify_key, agg_param, nonces, - measurements, - print_test_vec, - test_vec_instance) + measurements) if agg_result != expected_agg_result: print('vdaf test failed ({} on {}): unexpected result: got {}; want {}' .format(vdaf.test_vec_name, measurements, agg_result, diff --git a/test_vec/08/IdpfBBCGGI21_0.json b/test_vec/08/IdpfBBCGGI21_0.json new file mode 100644 index 00000000..ab55b44f --- /dev/null +++ b/test_vec/08/IdpfBBCGGI21_0.json @@ -0,0 +1,52 @@ +{ + "alpha": "0", + "beta_inner": [ + [ + "0", + "0" + ], + [ + "1", + "1" + ], + [ + "2", + "2" + ], + [ + "3", + "3" + ], + [ + "4", + "4" + ], + [ + "5", + "5" + ], + [ + "6", + "6" + ], + [ + "7", + "7" + ], + [ + "8", + "8" + ] + ], + "beta_leaf": [ + "9", + "9" + ], + "bits": 10, + "keys": [ + "000102030405060708090a0b0c0d0e0f", + "101112131415161718191a1b1c1d1e1f" + ], + "nonce": "000102030405060708090a0b0c0d0e0f", + "public_share": 
"54c50d0a193b2b5d22d98b69cacff166bad7c487acc275da62c9494970c8c530691071489a38f73f182c52e1c6581a727a18bbc9dd4eefd9b84dfa08174ae87e6513ca16b15e42fe6bb43bb883c58260a9dc907478cec4bc7e5351049a376a612b03c75446e5941d0bdf4c1a105f1b8365f54a7712051173961d6a6826303ed083ee3e1aeee2db4a9f42a53c4f5c3c05f9909f905e7908c5b7068927912d4319c391d284006c3376c0f1a06307d532000b6b9740ce776f24e0413c8745c9f79d63845fd03f68bd07d0639771680453ea261eba1f5c44f23e702cb3002e98ddd7e36a01f245401c5d7762c85768c23bf06dc6b6b32d482a093504a3d507cda0b8b5e3ff36562cb8263da01ce5e9f801463a7115eb436c15867ab4b64f01400c8c7aabe7868e5e8152a2ee30cd9d7625fb6c261cd2deafe75f81f25701398398d6d2021ce160a9652b9f02223d8ca8af4c45f7023bdceb56b2b1e407cf6ef0f485e0291ea082bc632938f75bd2cf2b4441e8b02f" +} diff --git a/test_vec/08/IdpfPoplar_0.json b/test_vec/08/IdpfPoplar_0.json deleted file mode 100644 index 68f31592..00000000 --- a/test_vec/08/IdpfPoplar_0.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "alpha": "0", - "beta_inner": [ - [ - "0", - "0" - ], - [ - "1", - "1" - ], - [ - "2", - "2" - ], - [ - "3", - "3" - ], - [ - "4", - "4" - ], - [ - "5", - "5" - ], - [ - "6", - "6" - ], - [ - "7", - "7" - ], - [ - "8", - "8" - ] - ], - "beta_leaf": [ - "9", - "9" - ], - "binder": "736f6d65206e6f6e6365", - "bits": 10, - "keys": [ - "000102030405060708090a0b0c0d0e0f", - "101112131415161718191a1b1c1d1e1f" - ], - "public_share": 
"18850fb4933f36be2d456c4e2477baf2c9a4a7204fb09ae3e3ff925ecfe07a297b07b1980635d1e1f512afbd6388c98f5bf076cc5fe746282e59b81e51bffd6699fac7ec2dab3e9dd2c60d896beb64131d73b8abd7b2f0b327e5d91b42c677468048180e55bd80946611101ff01cb82022f7aecbe1db443499a311b7b9ee114acc16ae30d73d77ab2995eef0d536c26753647394f24d0c7b6db95b0eda08056b919e862c373e9c7a1e17590e5d5f250e56249ad28f9c13a8b57dfd7dcd05d78d49640d0aa35173cb4a659ada59f57d0c408edf52cb16d8bfb5c5d56d0c28a1aa7d96054c848cfeb0eeb6576b7c8c5328dfe502bc5eef4f1c6f58abe9ad37b448995e2b584cdb5f3a94f056ca7b868ad8f593f07c73f88596398fb4f13c5a7393b8fb0d2c76076132f54484f5f18209f42e4fe98ba2b9b1d1d19c139dc4520ada547ddbb70baecf1bba61a7b3cbb4648a7235142e201a173038dac559940a55d29a68cc87298d722b6644fa445460ddb9938c64" -} diff --git a/test_vec/08/Prio3MultihotCountVec_0.json b/test_vec/08/Prio3MultihotCountVec_0.json new file mode 100644 index 00000000..3a54560c --- /dev/null +++ b/test_vec/08/Prio3MultihotCountVec_0.json @@ -0,0 +1,58 @@ +{ + "agg_param": null, + "agg_result": [ + 0, + 1, + 1, + 0 + ], + "agg_shares": [ + "129fa135e4a6587429f0ec43e8fec1f1fc0a9d316faae00e2e674571c599c1ed82e3c48e5fa0656dbb79b79dbf7fd04944c5d7951eddccf4e4b4496e858ed108", + "ef605eca1b59a78bba0f13bc17013e0e06f562ce90551ff1b598ba8e3a663e12801c3b71a05f9a922886486240802fb6bd3a286ae122330bff4ab6917a712ef7" + ], + "chunk_length": 2, + "length": 4, + "max_weight": 2, + "prep": [ + { + "input_shares": [ + "129fa135e4a6587429f0ec43e8fec1f1fc0a9d316faae00e2e674571c599c1ed82e3c48e5fa0656dbb79b79dbf7fd04944c5d7951eddccf4e4b4496e858ed108042a04280d9f72a5acd979a301de4f690dc26ecb37fded6ae9c6561d92606241422f5245526d038f893f227f87d47a5a59b6e6f820a428319c57148faa7b3f330533c7c576a88b2a93ae40a79ba208a74bbe4c84aa3aaed67b4c4127fb4a53da2a52aef277adeea24a6e3a98f78b527667201fcea21d6ce8477b0f81df6e513701b24df25ead4d87d912fbda0d79f31c53d8463f73d89768d7600a87e354e7ee0ea6422c2d59e2824a4e5f154df36f7b12799b964d1f765dd7a43da449b49a4337dd5b1a89eeec6153ad7544df95fccf303132333435363738393a3b3c3d3e3f", + 
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f" + ], + "measurement": [ + 0, + 1, + 1, + 0 + ], + "nonce": "000102030405060708090a0b0c0d0e0f", + "out_shares": [ + [ + "129fa135e4a6587429f0ec43e8fec1f1", + "fc0a9d316faae00e2e674571c599c1ed", + "82e3c48e5fa0656dbb79b79dbf7fd049", + "44c5d7951eddccf4e4b4496e858ed108" + ], + [ + "ef605eca1b59a78bba0f13bc17013e0e", + "06f562ce90551ff1b598ba8e3a663e12", + "801c3b71a05f9a922886486240802fb6", + "bd3a286ae122330bff4ab6917a712ef7" + ] + ], + "prep_messages": [ + "8db71399051233d0b68e2d0222409850" + ], + "prep_shares": [ + [ + "2cc3a5084df397ebc58698b22482b4732a5797dbd8da1f3e5bb871d22ca282d450b81d616e02cae535158d0d5eabe011c3b813428ef5ed25f69249b42950404dd33867badeed7f61e48e5b31a7a90158fdd8dbd3e58662a1df3235f64b0fad4198bd0348157dc4f2b6dae85142605a13", + "d53c5af7b20c68141e79674ddb7d4b8cee0dac28532f3cab5bb8d6d5bcba125523ca18b1be334a0b2d5d11a0981976dc21d53d718dbe88606267101d6deae2c099aa7b26a43a38d1404131d68105f98865f251717fc1730893614337f27b42a8643b6b69305fe925052c178ade937d83" + ] + ], + "public_share": "98bd0348157dc4f2b6dae85142605a13643b6b69305fe925052c178ade937d83", + "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f" + } + ], + "shares": 2, + "verify_key": "000102030405060708090a0b0c0d0e0f" +}